Posted to commits@airflow.apache.org by po...@apache.org on 2021/07/18 11:20:17 UTC

[airflow] branch v2-1-test updated (051d4a1 -> 91007ef)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a change to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


    from 051d4a1  Update changelog with Python 3.9 support.
     new 8c336c1  Fix permissions for CodeQL workflows (#16660)
     new 00e8072  Remove upstart from docs (#16672)
     new a1e9d43  Add preparation of images as part of RC preparation process (#16674)
     new 91b59e9  Add --executor option to breeze kind-cluster deploy command (#15661)
     new bc49f1f  Use different executors for Helm Chart tests in CI (#15791)
     new 11e73c5  Change default airflow version in Dockerfile (#16714)
     new 0c24da0  Adding missing word to welcome message (#16726)
     new fa813c2  Fix timing out tests for public GitHub Runners. (#16750)
     new 811f516  Breeze should work with new docker-compose fallback (#16743)
     new 061ab9b  Switches to ghcr.io container registry (#16775)
     new cf497c0  Removes coverage from kubernetes tests (#16794)
     new 03a23e0  Remove legacy GitHub Packages (#16776)
     new b7493b6  Fix Airflow releasing guide (#16924)
     new 7c1ce3a  Avoid verification of images multiple times (#16928)
     new 7607ad7  Remove cache for kubernetes tests (#16927)
     new 425f85b  Switch Breeze/CI to ghcr.io exclusively (#16780)
     new 079acb1  Pulls latest images to build images in "Build Image" flow (#16948)
     new 4781a59  Fixed parsing issue of _docker.env file for docker-compose v2 (#16950)
     new e3ef80d  Fixes passing variables via docker --env-file command (#16959)
     new f5c64aa  Fixes typo in the name of file for Breeze docker compose env (#16971)
     new e10acaa  Move CI-integration images to ghcr.io (#16797)
     new c450b66  Errors out instead of trying to workaround buggy docker-compose v2 (#16989)
     new 54d2ae9  Fix release guide when copying artifacts (#17001)
     new 5ca1a2e  Drop support for Airflow 1.10 in entrypoint_prod.sh and improve MSSQL compatibility (#17011)
     new 672959c  Fix bug and small improvements in scripts/tools/list-integrations.py (#17004)
     new 4047c78  Dev: Bump stale action to v4 (#17025)
     new 61dc4ca  Fixes "development" and "rc" cross dependencies between providers (#17023)
     new 5c7505f  Updated clean-logs.sh (#16978)
     new 91007ef  Fixes detection of version 2 of docker-compose (#17062)

The 29 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
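
If you want to inspect these revisions locally before the per-commit
emails arrive, a quick sketch (assuming a local clone of the repository
with network access to gitbox):

    # Fetch the updated branch and list the 29 new revisions, oldest first.
    git fetch https://gitbox.apache.org/repos/asf/airflow.git v2-1-test
    git log --oneline --reverse 051d4a1..91007ef

    # Show the full patch of a single revision, e.g. the ghcr.io switch
    # that is described later in this email.
    git show 425f85b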


Summary of changes:
 .github/workflows/build-images.yml                 |  15 +-
 .github/workflows/ci.yml                           | 258 +++++++-------
 .github/workflows/codeql-analysis.yml              |   6 +-
 .github/workflows/stale.yml                        |   2 +-
 BREEZE.rst                                         | 210 +++---------
 CI.rst                                             | 374 +++++++--------------
 CONTRIBUTING.rst                                   |   8 +-
 CONTRIBUTORS_QUICK_START.rst                       |   2 -
 Dockerfile                                         |   2 +-
 IMAGES.rst                                         | 220 ++----------
 MANIFEST.in                                        |   1 -
 README.md                                          |  41 +++
 TESTING.rst                                        |   4 -
 breeze                                             | 251 +++++---------
 breeze-complete                                    |  22 +-
 dev/README_RELEASE_AIRFLOW.md                      |  33 +-
 dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md        |   5 +-
 dev/provider_packages/prepare_provider_packages.py |  21 +-
 dev/retag_docker_images.py                         |  65 +---
 docs/apache-airflow/howto/index.rst                |   1 -
 docs/apache-airflow/howto/run-with-upstart.rst     |  44 ---
 docs/docker-stack/entrypoint.rst                   |  26 +-
 images/CI.png                                      | Bin 243004 -> 0 bytes
 images/ci/CI.png                                   | Bin 243004 -> 0 bytes
 images/ci/pull_request_ci_flow.md5                 |   2 +-
 images/ci/pull_request_ci_flow.mermaid             | 110 +++---
 images/ci/pull_request_ci_flow.png                 | Bin 148327 -> 193138 bytes
 images/ci/push_ci_flow.md5                         |   2 +-
 images/ci/push_ci_flow.mermaid                     | 106 +++---
 images/ci/push_ci_flow.png                         | Bin 160006 -> 200079 bytes
 images/ci/scheduled_ci_flow.md5                    |   2 +-
 images/ci/scheduled_ci_flow.mermaid                | 115 +++----
 images/ci/scheduled_ci_flow.png                    | Bin 197719 -> 171813 bytes
 kubernetes_tests/test_kubernetes_pod_operator.py   |   4 +-
 .../test_kubernetes_pod_operator_backcompat.py     |   5 +-
 scripts/ci/docker-compose/base.yml                 |   2 +-
 scripts/ci/docker-compose/integration-kerberos.yml |   2 +-
 scripts/ci/docker-compose/integration-openldap.yml |   2 +-
 scripts/ci/docker-compose/integration-trino.yml    |   2 +-
 scripts/ci/docker-compose/local-prod.yml           |  37 --
 .../ci/dockerfiles/apache-rat/build_and_push.sh    |  11 +-
 scripts/ci/dockerfiles/bats/build_and_push.sh      |  11 +-
 .../dockerfiles/krb5-kdc-server/build_and_push.sh  |  11 +-
 scripts/ci/dockerfiles/stress/build_and_push.sh    |  12 +-
 scripts/ci/dockerfiles/trino/Dockerfile            |   2 +-
 scripts/ci/dockerfiles/trino/build_and_push.sh     |  13 +-
 scripts/ci/images/ci_prepare_ci_image_on_ci.sh     |  12 +-
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  15 +-
 scripts/ci/images/ci_push_ci_images.sh             |   2 +-
 scripts/ci/images/ci_push_production_images.sh     |   2 +-
 .../ci/images/ci_wait_for_and_verify_ci_image.sh   |  22 +-
 .../ci/images/ci_wait_for_and_verify_prod_image.sh |  17 +-
 scripts/ci/kubernetes/ci_run_kubernetes_tests.sh   |   9 +-
 ...tup_cluster_and_deploy_airflow_to_kubernetes.sh |   1 +
 ..._cluster_and_run_kubernetes_tests_single_job.sh |   1 +
 ...lusters_and_run_kubernetes_tests_in_parallel.sh |   1 -
 scripts/ci/libraries/_build_images.sh              | 153 +++------
 scripts/ci/libraries/_initialization.sh            |  41 +--
 scripts/ci/libraries/_kind.sh                      |  46 ++-
 scripts/ci/libraries/_push_pull_remove_images.sh   | 243 ++++---------
 scripts/ci/static_checks/bats_tests.sh             |   7 +-
 scripts/ci/static_checks/check_license.sh          |   2 +-
 .../build_dockerhub.sh}                            |  22 +-
 scripts/ci/tools/ci_clear_tmp.sh                   |  34 --
 .../{ci_fix_ownership.sh => fix_ownership.sh}      |   0
 .../{ci_free_space_on_ci.sh => free_space.sh}      |   0
 scripts/ci/tools/prepare_prod_docker_images.sh     |   2 +-
 scripts/in_container/airflow_ci.cfg                |   1 +
 scripts/in_container/prod/clean-logs.sh            |   8 +-
 scripts/in_container/prod/entrypoint_prod.sh       |  34 +-
 scripts/in_container/run_clear_tmp.sh              |  21 --
 scripts/in_container/run_tmux_welcome.sh           |   2 +-
 scripts/tools/list-integrations.py                 |  12 +-
 scripts/upstart/README                             |  33 --
 scripts/upstart/airflow-flower.conf                |  34 --
 scripts/upstart/airflow-scheduler.conf             |  38 ---
 scripts/upstart/airflow-webserver.conf             |  34 --
 scripts/upstart/airflow-worker.conf                |  34 --
 tests/kubernetes/pod.yaml                          |   2 +-
 tests/kubernetes/test_pod_generator.py             |   4 +-
 80 files changed, 960 insertions(+), 1991 deletions(-)
 delete mode 100644 docs/apache-airflow/howto/run-with-upstart.rst
 delete mode 100644 images/CI.png
 delete mode 100644 images/ci/CI.png
 delete mode 100644 scripts/ci/docker-compose/local-prod.yml
 rename scripts/ci/{images/ci_build_dockerhub.sh => tools/build_dockerhub.sh} (76%)
 delete mode 100755 scripts/ci/tools/ci_clear_tmp.sh
 rename scripts/ci/tools/{ci_fix_ownership.sh => fix_ownership.sh} (100%)
 rename scripts/ci/tools/{ci_free_space_on_ci.sh => free_space.sh} (100%)
 delete mode 100755 scripts/in_container/run_clear_tmp.sh
 delete mode 100644 scripts/upstart/README
 delete mode 100644 scripts/upstart/airflow-flower.conf
 delete mode 100644 scripts/upstart/airflow-scheduler.conf
 delete mode 100644 scripts/upstart/airflow-webserver.conf
 delete mode 100644 scripts/upstart/airflow-worker.conf

[airflow] 16/29: Switch Breeze/CI to ghcr.io exclusively (#16780)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 425f85b235b1ea4e329b72fe196001b67df14b41
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Jul 12 17:21:13 2021 +0200

    Switch Breeze/CI to ghcr.io exclusively (#16780)
    
    Breeze traditionally used DockerHub to pull images, because
    they were public and GitHub Packages were not. With the GitHub
    Container Registry, however, we can switch Breeze fully to the
    GitHub Container Registry as well.

    Thanks to moving to the GitHub Container Registry we can remove
    a lot of code responsible for maintaining different names and
    versions of the images in DockerHub and GitHub Container
    Registry. It also streamlines and simplifies the process of
    refreshing the images when new Python versions are released -
    the CI push builds will check whether a new Python image has
    been released in DockerHub and rebuild the base image
    automatically if needed (and push it as cache).
    
    The CI documentation (including sequence diagrams) has been
    refreshed to reflect those changes (and other changes done in
    the meantime). The flows are now simplified as DockerHub is
    largely moved out of the picture.
    
    The only remaining DockerHub images now are:
    
    * images used during CI for integrations (airflow-ci)
    * officially released Production Airflow images (airflow)
    
    The integration images will be moved to GitHub Container Registry
    in a subsequent PR and the only images remaining in DockerHub
    will be the officially released Production Airflow images.
    
    Part of #16555
    
    (cherry picked from commit af0598f5b86b577626f5c4e5d093b26fd2612629)
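
The automatic base image refresh described above boils down to comparing
the python image published on DockerHub with the copy cached in ghcr.io
and rebuilding when they differ. A rough illustrative shell sketch of the
idea (the image names and the ghcr.io cache location below are assumptions
for the example, not the exact names used by the CI scripts):

    # Illustrative only - not the actual scripts/ci code.
    PYTHON_BASE_IMAGE="python:3.6-slim-buster"                         # assumed tag
    CACHED_BASE_IMAGE="ghcr.io/apache/airflow-python:3.6-slim-buster"  # assumed cache location

    # Pull both images; the cached one may not exist yet.
    docker pull "${PYTHON_BASE_IMAGE}"
    docker pull "${CACHED_BASE_IMAGE}" || true

    # Compare image IDs (config digests), which survive re-pushing to another registry.
    upstream_id=$(docker inspect --format '{{.Id}}' "${PYTHON_BASE_IMAGE}")
    cached_id=$(docker inspect --format '{{.Id}}' "${CACHED_BASE_IMAGE}" 2>/dev/null || echo "none")

    if [[ "${upstream_id}" != "${cached_id}" ]]; then
        echo "New python base image released - rebuilding images and pushing the new base as cache"
    fi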
---
 .github/workflows/build-images.yml                 |  13 +-
 .github/workflows/ci.yml                           | 130 +++-----
 BREEZE.rst                                         | 178 ++++-------
 CI.rst                                             | 336 +++++++--------------
 CONTRIBUTING.rst                                   |   8 +-
 CONTRIBUTORS_QUICK_START.rst                       |   2 -
 IMAGES.rst                                         | 135 ++-------
 README.md                                          |  41 +++
 TESTING.rst                                        |   4 -
 breeze                                             | 166 +++-------
 breeze-complete                                    |  14 +-
 dev/README_RELEASE_AIRFLOW.md                      |   5 +-
 dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md        |   5 +-
 dev/retag_docker_images.py                         |  65 +---
 images/CI.png                                      | Bin 243004 -> 0 bytes
 images/ci/CI.png                                   | Bin 243004 -> 0 bytes
 images/ci/pull_request_ci_flow.md5                 |   2 +-
 images/ci/pull_request_ci_flow.mermaid             | 110 ++++---
 images/ci/pull_request_ci_flow.png                 | Bin 148327 -> 193138 bytes
 images/ci/push_ci_flow.md5                         |   2 +-
 images/ci/push_ci_flow.mermaid                     | 106 ++++---
 images/ci/push_ci_flow.png                         | Bin 160006 -> 200079 bytes
 images/ci/scheduled_ci_flow.md5                    |   2 +-
 images/ci/scheduled_ci_flow.mermaid                | 115 ++++---
 images/ci/scheduled_ci_flow.png                    | Bin 197719 -> 171813 bytes
 scripts/ci/docker-compose/base.yml                 |   2 +-
 scripts/ci/docker-compose/local-prod.yml           |  37 ---
 scripts/ci/images/ci_prepare_ci_image_on_ci.sh     |  12 +-
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  15 +-
 scripts/ci/images/ci_push_ci_images.sh             |   2 +-
 scripts/ci/images/ci_push_production_images.sh     |   2 +-
 .../ci/images/ci_wait_for_and_verify_ci_image.sh   |  14 +-
 .../ci/images/ci_wait_for_and_verify_prod_image.sh |  15 +-
 scripts/ci/libraries/_build_images.sh              | 149 +++------
 scripts/ci/libraries/_initialization.sh            |  32 +-
 scripts/ci/libraries/_kind.sh                      |  42 ++-
 scripts/ci/libraries/_push_pull_remove_images.sh   | 207 ++++---------
 .../build_dockerhub.sh}                            |  21 +-
 scripts/ci/tools/ci_clear_tmp.sh                   |  34 ---
 .../{ci_fix_ownership.sh => fix_ownership.sh}      |   0
 .../{ci_free_space_on_ci.sh => free_space.sh}      |   0
 scripts/ci/tools/prepare_prod_docker_images.sh     |   2 +-
 scripts/in_container/airflow_ci.cfg                |   1 +
 scripts/in_container/run_clear_tmp.sh              |  21 --
 44 files changed, 694 insertions(+), 1353 deletions(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 9cbae1d..34aa82e 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -34,7 +34,6 @@ env:
   SKIP_CHECK_REMOTE_IMAGE: "true"
   DB_RESET: "true"
   VERBOSE: "true"
-  USE_GITHUB_REGISTRY: "true"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
   # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the
@@ -150,8 +149,8 @@ jobs:
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
       CONTINUE_ON_PIP_CHECK_FAILURE: "true"
       DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
-      FORCE_PULL_BASE_PYTHON_IMAGE: >
-        ${{ github.event_name == 'schedule' && 'true' || 'false' }}
+      CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: >
+        ${{ github.event_name == 'pull_request_target' && 'false' || 'true' }}
       outputs: ${{toJSON(needs.build-info.outputs) }}
     steps:
       - name: Set envs
@@ -199,7 +198,7 @@ jobs:
           rm -rf "scripts/ci"
           mv "main-airflow/scripts/ci" "scripts"
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Build CI images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Push CI images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
@@ -225,8 +224,8 @@ jobs:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
       DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
-      FORCE_PULL_BASE_PYTHON_IMAGE: >
-        ${{ github.event_name == 'schedule' && 'true' || 'false' }}
+      CHECK_IF_BASE_PYTHON_IMAGE_UPDATED: >
+        ${{ github.event_name == 'pull_request_target' && 'false' || 'true' }}
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
     steps:
       - name: Set envs
@@ -274,7 +273,7 @@ jobs:
           rm -rf "scripts/ci"
           mv "main-airflow/scripts/ci" "scripts"
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Build CI images ${{ matrix.python-version }}:${{ env.TARGET_COMMIT_SHA }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
         # Pull images built in the previous step
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d13a6d5..c95132a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -36,7 +36,6 @@ env:
   DB_RESET: "true"
   VERBOSE: "true"
   DOCKER_CACHE: "pulled"
-  USE_GITHUB_REGISTRY: "true"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
   # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the
@@ -216,6 +215,30 @@ jobs:
             ./scripts/ci/selective_ci_checks.sh
           fi
 
+  tests-ui:
+    timeout-minutes: 10
+    name: React UI tests
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+    needs: [build-info]
+    if: needs.build-info.outputs.run-ui-tests == 'true'
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+      - name: "Setup node"
+        uses: actions/setup-node@v2
+        with:
+          node-version: 14
+      - name: "Cache eslint"
+        uses: actions/cache@v2
+        with:
+          path: 'airflow/ui/node_modules'
+          key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }}
+      - run: yarn --cwd airflow/ui/ install --frozen-lockfile --non-interactive
+      - run: yarn --cwd airflow/ui/ run test
+
+
   test-openapi-client-generation:
     timeout-minutes: 10
     name: "Test OpenAPI client generation"
@@ -244,7 +267,7 @@ jobs:
           fetch-depth: 2
           persist-credentials: false
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
         if: |
           needs.build-info.outputs.waitForImage == 'true'
       - name: "Setup python"
@@ -277,7 +300,7 @@ jobs:
           python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
         if: needs.build-info.outputs.waitForImage == 'true'
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
         if: |
           needs.build-info.outputs.waitForImage == 'true'
       - name: >
@@ -316,7 +339,7 @@ jobs:
         with:
           python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Get Python version"
@@ -417,7 +440,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           persist-credentials: false
           submodules: recursive
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Fetch inventory versions"
@@ -471,7 +494,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Prepare provider documentation"
@@ -519,7 +542,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Prepare provider packages: sdist"
@@ -561,7 +584,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests: Helm"
@@ -618,7 +641,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests: ${{needs.build-info.outputs.testTypes}}"
@@ -674,7 +697,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests: ${{needs.build-info.outputs.testTypes}}"
@@ -727,7 +750,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests: ${{needs.build-info.outputs.testTypes}}"
@@ -790,7 +813,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         run: |
           echo "ISSUE_ID=10128" >> $GITHUB_ENV
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests: Quarantined"
@@ -830,13 +853,16 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     continue-on-error: true
     needs:
       - build-info
-      - tests-kubernetes
       - tests-postgres
       - tests-sqlite
       - tests-mysql
       - tests-quarantined
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+    # Only upload coverage on merges to main
+    if: >
+      github.ref == 'refs/heads/main' && github.repository == 'apache/airflow' &&
+      github.event_name == 'push'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -877,7 +903,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
         if: needs.build-info.outputs.waitForImage == 'true'
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
         if: |
           needs.build-info.outputs.waitForImage == 'true'
       - name: >
@@ -929,7 +955,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Get all PROD images"
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
         env:
@@ -983,7 +1009,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Get all PROD images"
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
         env:
@@ -1054,7 +1080,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: Set push-python-image
         id: push-python-image
         run: |
@@ -1114,7 +1140,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Push CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
@@ -1154,7 +1180,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        run: ./scripts/ci/tools/free_space.sh
       - name: >
           Wait for CI images
           ${{ needs.build-info.outputs.pythonVersions }}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}
@@ -1190,69 +1216,3 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           github_token: ${{ secrets.GITHUB_TOKEN }}
           branch: ${{ steps.constraints-branch.outputs.branch }}
           directory: "repo"
-
-  tag-repo-nightly:
-    timeout-minutes: 10
-    name: "Tag repo nightly"
-    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-    needs:
-      - docs
-      - build-info
-      - static-checks
-      - tests-sqlite
-      - tests-postgres
-      - tests-mysql
-      - tests-kubernetes
-      - constraints
-      - prepare-test-provider-packages-wheel
-      - prepare-test-provider-packages-sdist
-    if: github.event_name == 'schedule' &&  github.repository == 'apache/airflow'
-    env:
-      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          submodules: recursive
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Tag commit"
-        run: |
-          BRANCH_NAME=$(echo "${{ github.ref }}" | sed 's/refs\/heads\///')
-          echo "Tagging ${BRANCH_NAME}"
-          git tag -f nightly-${BRANCH_NAME} HEAD
-      - name: "Push tags"
-        uses: ./.github/actions/github-push-action
-        with:
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          tags: true
-          force: true
-          branch: main
-
-  tests-ui:
-    timeout-minutes: 10
-    name: React UI tests
-    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-    needs: [build-info, ci-images]
-    if: needs.build-info.outputs.run-ui-tests == 'true'
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-      - name: "Setup node"
-        uses: actions/setup-node@v2
-        with:
-          node-version: 14
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Cache eslint"
-        uses: actions/cache@v2
-        with:
-          path: 'airflow/ui/node_modules'
-          key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }}
-      - run: yarn --cwd airflow/ui/ install --frozen-lockfile --non-interactive
-      - run: yarn --cwd airflow/ui/ run test
diff --git a/BREEZE.rst b/BREEZE.rst
index 0bb83fe..7e6bedd 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -236,7 +236,8 @@ for details.
   ./breeze
 
 The First time you run Breeze, it pulls and builds a local version of Docker images.
-It pulls the latest Airflow CI images from `Airflow DockerHub <https://hub.docker.com/r/apache/airflow-ci>`_
+It pulls the latest Airflow CI images from the
+`GitHub Container Registry <https://github.com/orgs/apache/packages?repo_name=airflow>`_
 and uses them to build your local Docker images. Note that the first run (per python) might take up to 10
 minutes on a fast connection to start. Subsequent runs should be much faster.
 
@@ -538,11 +539,7 @@ Building CI images
 With Breeze you can build images that are used by Airflow CI and production ones.
 
 For all development tasks, unit tests, integration tests, and static code checks, we use the
-**CI image** maintained on the DockerHub in the ``apache/airflow-ci`` repository.
-This Docker image contains a lot of test-related packages (size of ~1GB).
-Its tag follows the pattern of ``<BRANCH>-python<PYTHON_MAJOR_MINOR_VERSION>-ci``
-(for example, ``apache/airflow:main-python3.6-ci`` or ``apache/airflow-ci:v2-1-test-python3.6-ci``).
-The image is built using the `<Dockerfile.ci>`_ Dockerfile.
+**CI image** maintained in GitHub Container Registry.
 
 The CI image is built automatically as needed, however it can be rebuilt manually with
 ``build-image`` command. The production
@@ -634,12 +631,9 @@ default is to build ``both`` type of packages ``sdist`` and ``wheel``.
 Building Production images
 --------------------------
 
-The **Production image** is also maintained on the DockerHub in both ``apache/airflow`` (for tagged and latest
-releases) or ``apache/airflow-ci`` repository (for branches). This Docker image (built using official
-Dockerfile) contains size-optimised Airflow installation with selected extras and dependencies. Its tag follows
-the pattern of ``<BRANCH>-python<PYTHON_MAJOR_MINOR_VERSION>`` (for example, ``apache/airflow-ci:main-python3.6``
-or ``apache/airflow-ci:v2-1-test-python3.6``) or in case of production images tagged with releases
-``apache/airflow:2.1.2-python3.8`` or ``apache/airflow:latest`` or ``apache/airflow:latest-python3.8``.
+The **Production image** is also maintained in GitHub Container Registry for Caching
+and in ``apache/airflow`` manually pushed for released versions. This Docker image (built using official
+Dockerfile) contains size-optimised Airflow installation with selected extras and dependencies.
 
 However in many cases you want to add your own custom version of the image - with added apt dependencies,
 python dependencies, additional Airflow extras. Breeze's ``build-image`` command helps to build your own,
@@ -1007,7 +1001,7 @@ by the root user, you can fix the ownership of those files by running this scrip
 
 .. code-block::
 
-  ./scripts/ci/tools/ci_fix_ownership.sh
+  ./scripts/ci/tools/fix_ownership.sh
 
 Mounting Local Sources to Breeze
 --------------------------------
@@ -1200,13 +1194,12 @@ This is the current syntax for  `./breeze <./breeze>`_:
         'breeze shell -- -c "ls -la"'
         'breeze -- -c "ls -la"'
 
-        For DockerHub pull: --dockerhub-user and --dockerhub-repo flags can be used to specify
-        the repository to pull from. For GitHub repository, the --github-repository
-        flag can be used for the same purpose. You can also use --github-image-id <COMMIT_SHA> in case
+        For GitHub repository, the --github-repository flag can be used to specify the repository
+        to pull and push images. You can also use --github-image-id <COMMIT_SHA> in case
         you want to pull the image with specific COMMIT_SHA tag.
 
         'breeze shell \
-              --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
+              -- github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
         'breeze \
               --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
 
@@ -1248,14 +1241,16 @@ This is the current syntax for  `./breeze <./breeze>`_:
            '--build-cache-local' or '-build-cache-pulled', or '--build-cache-none'
 
         Choosing whether to force pull images or force build the image:
-            '--force-build-image',
-             '--force-pull-image', '--force-pull-base-python-image'
+            '--force-build-image', '--force-pull-image'
+
+        Checking if the base python image has been updated:
+            '--check-if-base-python-image-updated'
 
         You can also pass '--production-image' flag to build production image rather than CI image.
 
-        For DockerHub pulling of base images: '--dockerhub-user' and '--dockerhub-repo' flags can be
-        used to specify the repository to pull from. For GitHub repository, the '--github-repository'
-        flag can be used for the same purpose.
+        For GitHub repository, the '--github-repository' can be used to choose repository
+        to pull/push images.
+
   Flags:
 
   -p, --python PYTHON_MAJOR_MINOR_VERSION
@@ -1296,16 +1291,15 @@ This is the current syntax for  `./breeze <./breeze>`_:
           package-related files, but you can force it using this flag.
 
   -P, --force-pull-images
-          Forces pulling of images from DockerHub before building to populate cache. The
-          images are pulled by default only for the first time you run the
+          Forces pulling of images from GitHub Container Registry before building to populate cache.
+          The images are pulled by default only for the first time you run the
           environment, later the locally build images are used as cache.
 
-  --force-pull-base-python-image
-          Forces pulling of Python base image from DockerHub before building to
-          populate cache. This should only be run in case we need to update to latest available
-          Python base image. This should be a rare and manually triggered event. Also this flag
-          is used in the scheduled run in CI when we rebuild all the images from the scratch
-          and run the tests to see if the latest python images do not fail our tests.
+  --check-if-base-python-image-updated
+          Checks if Python base image from DockerHub has been updated vs the current python base
+          image we store in GitHub Container Registry. Python images are updated regularly with
+          security fixes, this switch will check if a new one has been released and will pull and
+          prepare a new base python based on the latest one.
 
   --cleanup-docker-context-files
           Removes whl and tar.gz files created in docker-context-files before running the command.
@@ -1416,14 +1410,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
           This is default strategy used by the Production image builds.
 
   -U, --build-cache-pulled
-          Uses images pulled from registry (either DockerHub or GitHub depending on
-          --use-github-registry flag) to build images. The pulled images will be used as cache.
+          Uses images pulled from GitHub Container Registry to build images.
           Those builds are usually faster than when ''--build-cache-local'' with the exception if
-          the registry images are not yet updated. The DockerHub images are updated nightly and the
-          GitHub images are updated after merges to main so it might be that the images are still
-          outdated vs. the latest version of the Dockerfiles you are using. In this case, the
-          ''--build-cache-local'' might be faster, especially if you iterate and change the
-          Dockerfiles yourself.
+          the registry images are not yet updated. The images are updated after successful merges
+          to main.
 
           This is default strategy used by the CI image builds.
 
@@ -1434,23 +1424,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
           This strategy is used by default for both Production and CI images for the scheduled
           (nightly) builds in CI.
 
-  -D, --dockerhub-user DOCKERHUB_USER
-          DockerHub user used to pull, push and build images. Default: apache.
-
-  -H, --dockerhub-repo DOCKERHUB_REPO
-          DockerHub repository used to pull, push, build images. Default: airflow-ci.
-
-  -c, --use-github-registry
-          If GitHub registry is enabled, pulls and pushes are done from the GitHub registry not
-          DockerHub. You need to be logged in to the registry in order to be able to pull/push from
-          and you need to be committer to push to Apache Airflow' GitHub registry.
-
   -g, --github-repository GITHUB_REPOSITORY
-          GitHub repository used to pull, push images when cache is used.
+          GitHub repository used to pull, push images.
           Default: apache/airflow.
 
-          If you use this flag, automatically --use-github-registry flag is enabled.
-
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
           debugging - when you run breeze with --verbose flags you will be able to see the commands
@@ -1573,24 +1550,17 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
   breeze push_image [FLAGS]
 
-        Pushes images to docker registry. You can push the images to DockerHub registry (default)
-        or to the GitHub registry (if --use-github-registry flag is used).
-
-        For DockerHub pushes: --dockerhub-user and --dockerhub-repo flags can be used to specify
-        the repository to push to. For GitHub repository, the --github-repository
-        flag can be used for the same purpose. You can also add
-        --github-image-id <COMMIT_SHA> in case you want to push image with specific
-        SHA tag. In case you specify --github-repository or --github-image-id, you
-        do not need to specify --use-github-registry flag.
+        Pushes images to GitHub registry.
 
+        You can add --github-repository to push to a different repository/organisation.
+        You can add --github-image-id <COMMIT_SHA> in case you want to push image with specific
+        SHA tag.
         You can also add --production-image flag to switch to production image (default is CI one)
 
         Examples:
 
         'breeze push-image' or
-        'breeze push-image --dockerhub-user user' to push to your private registry or
         'breeze push-image --production-image' - to push production image or
-        'breeze push-image --use-github-registry' - to push to GitHub image registry or
         'breeze push-image \
               --github-repository user/airflow' - to push to your user's fork
         'breeze push-image \
@@ -1598,23 +1568,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
   Flags:
 
-  -D, --dockerhub-user DOCKERHUB_USER
-          DockerHub user used to pull, push and build images. Default: apache.
-
-  -H, --dockerhub-repo DOCKERHUB_REPO
-          DockerHub repository used to pull, push, build images. Default: airflow-ci.
-
-  -c, --use-github-registry
-          If GitHub registry is enabled, pulls and pushes are done from the GitHub registry not
-          DockerHub. You need to be logged in to the registry in order to be able to pull/push from
-          and you need to be committer to push to Apache Airflow' GitHub registry.
-
   -g, --github-repository GITHUB_REPOSITORY
-          GitHub repository used to pull, push images when cache is used.
+          GitHub repository used to pull, push images.
           Default: apache/airflow.
 
-          If you use this flag, automatically --use-github-registry flag is enabled.
-
 
 
 
@@ -1625,8 +1582,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           automatically pull and use that image so that you can easily reproduce a problem
           that occurred in CI.
 
-          If you use this flag, automatically --use-github-registry is enabled.
-
           Default: latest.
 
   -v, --verbose
@@ -1930,16 +1885,15 @@ This is the current syntax for  `./breeze <./breeze>`_:
           package-related files, but you can force it using this flag.
 
   -P, --force-pull-images
-          Forces pulling of images from DockerHub before building to populate cache. The
-          images are pulled by default only for the first time you run the
+          Forces pulling of images from GitHub Container Registry before building to populate cache.
+          The images are pulled by default only for the first time you run the
           environment, later the locally build images are used as cache.
 
-  --force-pull-base-python-image
-          Forces pulling of Python base image from DockerHub before building to
-          populate cache. This should only be run in case we need to update to latest available
-          Python base image. This should be a rare and manually triggered event. Also this flag
-          is used in the scheduled run in CI when we rebuild all the images from the scratch
-          and run the tests to see if the latest python images do not fail our tests.
+  --check-if-base-python-image-updated
+          Checks if Python base image from DockerHub has been updated vs the current python base
+          image we store in GitHub Container Registry. Python images are updated regularly with
+          security fixes, this switch will check if a new one has been released and will pull and
+          prepare a new base python based on the latest one.
 
   --cleanup-docker-context-files
           Removes whl and tar.gz files created in docker-context-files before running the command.
@@ -2050,14 +2004,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
           This is default strategy used by the Production image builds.
 
   -U, --build-cache-pulled
-          Uses images pulled from registry (either DockerHub or GitHub depending on
-          --use-github-registry flag) to build images. The pulled images will be used as cache.
+          Uses images pulled from GitHub Container Registry to build images.
           Those builds are usually faster than when ''--build-cache-local'' with the exception if
-          the registry images are not yet updated. The DockerHub images are updated nightly and the
-          GitHub images are updated after merges to main so it might be that the images are still
-          outdated vs. the latest version of the Dockerfiles you are using. In this case, the
-          ''--build-cache-local'' might be faster, especially if you iterate and change the
-          Dockerfiles yourself.
+          the registry images are not yet updated. The images are updated after successful merges
+          to main.
 
           This is default strategy used by the CI image builds.
 
@@ -2520,16 +2470,15 @@ This is the current syntax for  `./breeze <./breeze>`_:
           package-related files, but you can force it using this flag.
 
   -P, --force-pull-images
-          Forces pulling of images from DockerHub before building to populate cache. The
-          images are pulled by default only for the first time you run the
+          Forces pulling of images from GitHub Container Registry before building to populate cache.
+          The images are pulled by default only for the first time you run the
           environment, later the locally build images are used as cache.
 
-  --force-pull-base-python-image
-          Forces pulling of Python base image from DockerHub before building to
-          populate cache. This should only be run in case we need to update to latest available
-          Python base image. This should be a rare and manually triggered event. Also this flag
-          is used in the scheduled run in CI when we rebuild all the images from the scratch
-          and run the tests to see if the latest python images do not fail our tests.
+  --check-if-base-python-image-updated
+          Checks if Python base image from DockerHub has been updated vs the current python base
+          image we store in GitHub Container Registry. Python images are updated regularly with
+          security fixes, this switch will check if a new one has been released and will pull and
+          prepare a new base python based on the latest one.
 
   --cleanup-docker-context-files
           Removes whl and tar.gz files created in docker-context-files before running the command.
@@ -2640,14 +2589,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
           This is default strategy used by the Production image builds.
 
   -U, --build-cache-pulled
-          Uses images pulled from registry (either DockerHub or GitHub depending on
-          --use-github-registry flag) to build images. The pulled images will be used as cache.
+          Uses images pulled from GitHub Container Registry to build images.
           Those builds are usually faster than when ''--build-cache-local'' with the exception if
-          the registry images are not yet updated. The DockerHub images are updated nightly and the
-          GitHub images are updated after merges to main so it might be that the images are still
-          outdated vs. the latest version of the Dockerfiles you are using. In this case, the
-          ''--build-cache-local'' might be faster, especially if you iterate and change the
-          Dockerfiles yourself.
+          the registry images are not yet updated. The images are updated after successful merges
+          to main.
 
           This is default strategy used by the CI image builds.
 
@@ -2661,23 +2606,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
   ****************************************************************************************************
    Flags for pulling/pushing Docker images (both CI and production)
 
-  -D, --dockerhub-user DOCKERHUB_USER
-          DockerHub user used to pull, push and build images. Default: apache.
-
-  -H, --dockerhub-repo DOCKERHUB_REPO
-          DockerHub repository used to pull, push, build images. Default: airflow-ci.
-
-  -c, --use-github-registry
-          If GitHub registry is enabled, pulls and pushes are done from the GitHub registry not
-          DockerHub. You need to be logged in to the registry in order to be able to pull/push from
-          and you need to be committer to push to Apache Airflow' GitHub registry.
-
   -g, --github-repository GITHUB_REPOSITORY
-          GitHub repository used to pull, push images when cache is used.
+          GitHub repository used to pull, push images.
           Default: apache/airflow.
 
-          If you use this flag, automatically --use-github-registry flag is enabled.
-
 
 
 
@@ -2688,8 +2620,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           automatically pull and use that image so that you can easily reproduce a problem
           that occurred in CI.
 
-          If you use this flag, automatically --use-github-registry is enabled.
-
           Default: latest.
 
   ****************************************************************************************************
diff --git a/CI.rst b/CI.rst
index dcc859d..d0eec1b 100644
--- a/CI.rst
+++ b/CI.rst
@@ -58,17 +58,17 @@ For the CI builds of our we are using GitHub Container Registry to store results
 and pass it to the "CI Build" workflow.
 
 Currently in main version of Airflow we run tests in 4 different versions of Python (3.6, 3.7, 3.8, 3.9)
-which means that we have to build 6 images (3 CI ones and 3 PROD ones). Yet we run around 12 jobs
+which means that we have to build 8 images (4 CI ones and 4 PROD ones). Yet we run around 12 jobs
 with each of the CI images. That is a lot of time to just build the environment to run. Therefore
 we are utilising ``pull_request_target`` feature of GitHub Actions.
 
 This feature allows to run a separate, independent workflow, when the main workflow is run -
 this separate workflow is different than the main one, because by default it runs using ``main`` version
-of the sources but also - and most of all - that it has WRITE access to the repository.
+of the sources but also - and most of all - that it has WRITE access to the Github Container Image registry.
 
 This is especially important in our case where Pull Requests to Airflow might come from any repository,
 and it would be a huge security issue if anyone from outside could
-utilise the WRITE access to Apache Airflow repository via an external Pull Request.
+utilise the WRITE access to the Container Image Registry via external Pull Request.
 
 Thanks to the WRITE access and fact that the ``pull_request_target`` by default uses the ``main`` version of the
 sources, we can safely run some logic there will checkout the incoming Pull Request, build the container
@@ -77,16 +77,10 @@ this image can be built only once and used by all the jobs running tests. The im
 ``COMMIT_SHA`` of the incoming Pull Request and the tests run in the Pull Request can simply pull such image
 rather than build it from the scratch. Pulling such image takes ~ 1 minute, thanks to that we are saving
 a lot of precious time for jobs.
-4
-We use `GitHub Container Registry <https://docs.github.com/en/packages/guides/about-github-container-registry>`_
-GitHub Package Registry ``GITHUB_TOKEN`` is needed to push to the registry. You also have to manually manage
-permissions of the images, after creating image for the first time (pushing it using your personal token)
-you need to set their visibility to ``Public`` and enable
-`Inheriting access from repository <https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility#inheriting-access-for-a-container-image-from-a-repository>`_
-Those images have specific naming schema. See `Images documentation <IMAGES.rst>`_ for details.
 
-You can interact with the GitHub Registry images (pull/push) via `Breeze <BREEZE.rst>`_  - by passing
-``--use-github-registry`` flag.
+We use `GitHub Container Registry <https://docs.github.com/en/packages/guides/about-github-container-registry>`_
+``GITHUB_TOKEN`` is needed to push to the registry and we configured scopes of the tokens in our jobs
+to be able to write to the registry.
 
 Locally replicating CI failures
 -------------------------------
@@ -107,7 +101,6 @@ connected with the run.
 
 You can read more about it in `BREEZE.rst <BREEZE.rst>`_ and `TESTING.rst <TESTING.rst>`_
 
-
 Difference between local runs and GitHub Action workflows
 ---------------------------------------------------------
 
@@ -132,12 +125,6 @@ You can use those variables when you try to reproduce the build locally.
 |                                         |             |             |            | it requires to perform manual init/reset        |
 |                                         |             |             |            | if you stop the environment.                    |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
-| Dockerhub variables                                                                                                                |
-+-----------------------------------------+----------------------------------------+-------------------------------------------------+
-| ``DOCKERHUB_USER``                      |                 apache                 | Name of the DockerHub user to use               |
-+-----------------------------------------+----------------------------------------+-------------------------------------------------+
-| ``DOCKERHUB_REPO``                      |                 airflow                | Name of the DockerHub repository to use         |
-+-----------------------------------------+----------------------------------------+-------------------------------------------------+
 |                                                           Mount variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``MOUNT_SELECTED_LOCAL_SOURCES``        |     true    |    false    |    false   | Determines whether local sources are            |
@@ -325,21 +312,29 @@ You can use those variables when you try to reproduce the build locally.
 Running CI Builds locally
 =========================
 
-The following variables are automatically determined based on CI environment variables.
-You can locally by setting ``CI="true"`` and run the ci scripts from the ``scripts/ci`` folder:
+The scripts and configuration files for CI builds are all in ``scripts/ci`` - so that in the
+``pull_request_target`` target workflow, we can copy those scripts from the ``main`` branch and use them
+regardless of the changes done in the PR. This way we are kept safe from PRs injecting code into the builds.
 
-* ``provider_packages`` - scripts to build and test provider packages
+* ``build_airflow`` - builds airflow packages
 * ``constraints`` - scripts to build and publish latest set of valid constraints
 * ``docs`` - scripts to build documentation
 * ``images`` - scripts to build and push CI and PROD images
 * ``kubernetes`` - scripts to setup kubernetes cluster, deploy airflow and run kubernetes tests with it
+* ``openapi`` - scripts to run openapi generation
+* ``pre_commit`` - scripts to run pre-commit checks
+* ``provider_packages`` - scripts to build and test provider packages
+* ``static_checks`` - scripts to run static checks manually
 * ``testing`` - scripts that run unit and integration tests
-* ``tools`` - scripts that perform various clean-up and preparation tasks
+* ``tools`` - scripts that can be used for various clean-up and preparation tasks
 
-Common libraries of functions for all the scripts can be found in ``libraries`` folder.
+Common libraries of functions for all the scripts can be found in ``libraries`` folder. The ``dockerfiles``,
+``mysql.d``, ``openldap``, ``spectral_rules`` folders contains DockerFiles and configuration of integrations
+needed to run tests.
 
 For detailed use of those scripts you can refer to ``.github/workflows/`` - those scripts are used
-by the CI workflows of ours.
+by the CI workflows of ours. There are some variables that you can set to change the behaviour of the
+scripts.
 
 The default values are "sane"  you can change them to interact with your own repositories or registries.
 Note that you need to set "CI" variable to true in order to get the same results as in CI.
@@ -374,49 +369,20 @@ Note that you need to set "CI" variable to true in order to get the same results
 GitHub Registry Variables
 =========================
 
-Our CI uses GitHub Registry to pull and push images to/from by default. You can however make it interact with
-DockerHub registry or change the GitHub registry to interact with and use your own repo by changing
+Our CI uses GitHub Registry to pull and push images to/from by default. You can use your own repo by changing
 ``GITHUB_REPOSITORY`` and providing your own GitHub Username and Token.
 
-Currently we are using GitHub Packages to cache images for the build. GitHub Packages are "legacy"
-storage of binary artifacts for GitHub and as of September 2020 they introduced GitHub Container Registry
-as more stable, easier to manage replacement for container storage. It includes complete self-management
-of the images including permission management, public access, retention management and many more.
-
-More about it here:
-
-https://github.blog/2020-09-01-introducing-github-container-registry/
-
-Recently we started to experience unstable behaviour of the GitHub Packages ('unknown blob'
-and manifest v1 vs. v2 when pushing images to it). So together with ASF we proposed to
-enable GitHub Container Registry and it happened as of January 2020.
-
-More about it in https://issues.apache.org/jira/browse/INFRA-20959
-
-We are currently in the testing phase, especially when it comes to management of permissions -
-the model of permission management is not the same for Container Registry as it was for GitHub Packages
-(it was per-repository in GitHub Packages, but it is organization-wide in the Container Registry.
-
 +--------------------------------+---------------------------+----------------------------------------------+
 | Variable                       | Default                   | Comment                                      |
 +================================+===========================+==============================================+
-| USE_GITHUB_REGISTRY            | true                      | If set to "true", we interact with GitHub    |
-|                                |                           | Registry registry not the DockerHub one.     |
-+--------------------------------+---------------------------+----------------------------------------------+
-| GITHUB_REGISTRY                | ``ghcr.io``               | Name of the GitHub registry to use. Can be   |
-|                                |                           | ``docker.pkg.github.com`` or ``ghcr.io``     |
-+--------------------------------+---------------------------+----------------------------------------------+
 | GITHUB_REPOSITORY              | ``apache/airflow``        | Prefix of the image. It indicates which      |
 |                                |                           | repository from GitHub to use.               |
 +--------------------------------+---------------------------+----------------------------------------------+
 | GITHUB_USERNAME                |                           | Username to use to login to GitHub           |
 |                                |                           |                                              |
 +--------------------------------+---------------------------+----------------------------------------------+
-| GITHUB_TOKEN                   |                           | Token to use to login to GitHub. This token  |
-|                                |                           | is automatically set by GitHub CI to a       |
-|                                |                           | to a READ-only token for PR builds from fork |
-|                                |                           | and to WRITE token for direct pushes and     |
-|                                |                           | scheduled or workflow_run types of builds    |
+| GITHUB_TOKEN                   |                           | Token to use to login to GitHub.             |
+|                                |                           | Only used when pushing images on CI.         |
 +--------------------------------+---------------------------+----------------------------------------------+
 | GITHUB_REGISTRY_WAIT_FOR_IMAGE | ``false``                 | Wait for the image to be available. This is  |
 |                                |                           | useful if commit SHA is used as pull tag     |
@@ -431,39 +397,12 @@ the model of permission management is not the same for Container Registry as it
 Authentication in GitHub Registry
 =================================
 
-We are currently in the process of testing using GitHub Container Registry as cache for our images during
-the CI process. The default registry is set to "GitHub Packages", but we are testing the GitHub
-Container Registry. In case of GitHub Packages, authentication uses GITHUB_TOKEN mechanism. Authentication
-is needed for both pushing the images (WRITE) and pulling them (READ) - which means that GitHub token
-is used in "main" build (WRITE) and in fork builds (READ). For container registry, our images are
-Publicly Visible and we do not need any authentication to pull them.
-
-Dockerhub Variables
-===================
-
-If ``USE_GITHUB_REGISTRY`` is set to "false" you can interact directly with DockerHub. By default
-you pull from/push to "apache/airflow" DockerHub repository, but you can change
-that to your own repository by setting those environment variables:
-
-+----------------+----------------+-----------------------------------+
-| Variable       | Default        | Comment                           |
-+================+================+===================================+
-| DOCKERHUB_USER | ``apache``     | Name of the DockerHub user to use |
-+----------------+----------------+-----------------------------------+
-| DOCKERHUB_REPO | ``airflow-ci`` | Name of the DockerHub repo to use |
-+----------------+----------------+-----------------------------------+
+We are using GitHub Container Registry as cache for our images. Authentication uses the GITHUB_TOKEN mechanism.
+Authentication is needed only for pushing the images (WRITE), which happens in the "push" and
+"pull_request_target" workflows.
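+
+A minimal sketch (assuming a fork ``myuser/airflow`` and a Personal Access Token with the "packages"
+scopes) of overriding the registry variables described above and logging in to the registry manually:
+
+.. code-block:: bash
+
+  export GITHUB_REPOSITORY="myuser/airflow"
+  export GITHUB_USERNAME="myuser"
+  export GITHUB_TOKEN="<personal access token>"
+  # docker reads the token from stdin so it does not end up in the shell history
+  echo "${GITHUB_TOKEN}" | docker login ghcr.io -u "${GITHUB_USERNAME}" --password-stdin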
 
 CI Architecture
 ===============
 
- .. This image is an export from the 'draw.io' graph available in
-    https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-23+Migrate+out+of+Travis+CI
-    You can edit it there and re-export.
-
-.. image:: images/ci/CI.png
-    :align: center
-    :alt: CI architecture of Apache Airflow
-
 The following components are part of the CI infrastructure
 
 * **Apache Airflow Code Repository** - our code repository at https://github.com/apache/airflow
@@ -472,11 +411,10 @@ The following components are part of the CI infrastructure
 * **GitHub Actions** -  (GA) UI + execution engine for our jobs
 * **GA CRON trigger** - GitHub Actions CRON triggering our jobs
 * **GA Workers** - virtual machines running our jobs at GitHub Actions (max 20 in parallel)
-* **GitHub Private Image Registry**- image registry used as build cache for CI  jobs.
-  It is at https://ghcr.io/apache/airflow/airflow
-* **DockerHub Public Image Registry** - publicly available image registry at DockerHub.
-  It is at https://hub.docker.com/r/apache/airflow-ci
-* **DockerHub Build Workers** - virtual machines running build jibs at DockerHub
+* **GitHub Image Registry** - image registry used as build cache for CI jobs.
+  It is at https://ghcr.io/apache/airflow
+* **DockerHub Image Registry** - image registry used to pull base Python images and (manually) publish
+  the released Production Airflow images. It is at https://hub.docker.com/r/apache/airflow
 * **Official Images** (future) - these are official images that are prominently visible in DockerHub.
   We aim our images to become official images so that you will be able to pull them
   with ``docker pull apache-airflow``
@@ -520,7 +458,7 @@ whether it still builds, all tests are green.
 This is needed because some of the conflicting changes from multiple PRs might cause build and test failures
 after merge even if they do not fail in isolation. Also those runs are already reviewed and confirmed by the
 committers so they can be used to do some housekeeping:
-- pushing most recent image build in the PR to the GitHub Private Registry (for caching)
+- pushing the most recent image built in the PR to the GitHub Container Registry (for caching)
 - upgrading to latest constraints and pushing those constraints if all tests succeed
 - refresh latest Python base images in case new patch-level is released
 
@@ -528,11 +466,7 @@ The housekeeping is important - Python base images are refreshed with varying fr
 usually but sometimes several times per week) with the latest security and bug fixes.
 Those patch level images releases can occasionally break Airflow builds (specifically Docker image builds
 based on those images) therefore in PRs we only use latest "good" Python image that we store in the
-private GitHub cache. The direct push/main builds are not using registry cache to pull the Python images
-- they are directly pulling the images from DockerHub, therefore they will try the latest images
-after they are released and in case they are fine, CI Docker image is build and tests are passing -
-those jobs will push the base images to the private GitHub Registry so that they be used by subsequent
-PR runs.
+GitHub Container Registry, and the direct push/main builds will refresh the latest images if they have changed.
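+
+A minimal sketch (image names follow the conventions described later in this document) of comparing the
+upstream Python base image with the "known good" copy kept in our registry:
+
+.. code-block:: bash
+
+  # Upstream image released by the Python maintainers
+  docker pull python:3.8-slim-buster
+  # The "known good" copy used as the base for Airflow CI/PROD builds
+  docker pull ghcr.io/apache/airflow-python-v2:3.8-slim-buster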
 
 Scheduled runs
 --------------
@@ -541,10 +475,7 @@ Those runs are results of (nightly) triggered job - only for ``main`` branch. Th
 main purpose of the job is to check if there was no impact of external dependency changes on the Apache
 Airflow code (for example transitive dependencies released that fail the build). It also checks if the
 Docker images can be built from scratch (again - to see if some dependencies have not changed - for
-example downloaded package releases etc. Another reason for the nightly build is that the builds tags most
-recent main with ``nightly-main`` tag so that DockerHub build can pick up the moved tag and prepare a
-nightly public main build in the DockerHub registry. The ``v1-10-test`` branch images are build in
-DockerHub when pushing ``v1-10-stable`` manually.
+example downloaded package releases etc.).
 
 All runs consist of the same jobs, but the jobs behave slightly differently or they are skipped in different
 run categories. Here is a summary of the run categories with regards of the jobs they are running.
@@ -556,7 +487,6 @@ Those runs and their corresponding ``Build Images`` runs are only executed in ma
 repository, they are not executed in forks - we want to be nice to the contributors and not use their
 free build minutes on GitHub Actions.
 
-
 Workflows
 =========
 
@@ -570,34 +500,18 @@ Build Images Workflow
 
 This workflow builds images for the CI Workflow.
 
-It's a special type of workflow: ``pull_request_target`` which means that it is triggered when a pull request is opened.
-This also means that the workflow has Write permission to
-the Airflow repository and it can - for example - push to the GitHub registry the images used by CI Builds
-which means that the images can be built only once and reused by all the CI jobs (including the matrix jobs).
-We've implemented it in the way that the CI Build running will wait until the images are built by the
-"Build Images" workflow.
+It's a special type of workflow: ``pull_request_target``, which means that it is triggered when a pull request
+is opened. This also means that the workflow has Write permission to push the images used by CI Builds to the
+GitHub registry, so the images can be built only once and reused by all the CI jobs (including the matrix
+jobs). We've implemented it in such a way that the running CI Build will wait until the images are built by
+the "Build Images" workflow.
 
-(This workflow is also triggered on normal pushes to our "main" branches, i.e. after a pull request is merged.)
+This workflow is also triggered on normal pushes to our "main" branches, i.e. after a
+pull request is merged, and whenever a ``scheduled`` run is triggered.
 
 It's possible to disable this feature and go back to the previous behaviour via
 ``GITHUB_REGISTRY_WAIT_FOR_IMAGE`` flag in the "Build Workflow image". Setting it to "false" switches back to
-the behaviour that each job builds own image.
-
-You can also switch back to jobs building the images on its own on the fork level by setting
-``AIRFLOW_GITHUB_REGISTRY_WAIT_FOR_IMAGE`` secret to ``false``. This will disable pushing the "COMMIT_SHA"
-images to GitHub Registry and all the images will be built locally by each job. It is about 20%
-slower for the whole build on average, but it does not require to have access to push images to
-GitHub, which sometimes might be not available (depending on the account status).
-
-The write permission also allows to cancel workflows. It is not possible for the pull request
-"CI Builds" workflow run from the forks as they have no Write permission and cannot cancel running workflows.
-In our case we perform several different cancellations:
-
-* last, but not least - we cancel any of the "CI Build" workflow runs that failed in some important jobs.
-  This is another optimisations - GitHub does not have "fail-fast" on the whole run and this cancelling
-  effectively implements "fail-fast" of runs for some important jobs. Note that it only works when you
-  submit new PRs or push new changes. In case the jobs failed and no new PR is pushed after that, the whole
-  run will run to completion.
+the behaviour that each job builds its own image.
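+
+A minimal sketch of switching the feature off when running the CI scripts locally (so that every job
+builds the image on its own):
+
+.. code-block:: bash
+
+  export GITHUB_REGISTRY_WAIT_FOR_IMAGE="false"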
 
 The workflow has the following jobs:
 
@@ -607,11 +521,13 @@ The workflow has the following jobs:
 +===========================+=============================================+
 | Build Info                | Prints detailed information about the build |
 +---------------------------+---------------------------------------------+
-| Build CI/PROD images      | Builds all configured CI and PROD images    |
+| Build CI images           | Builds all configured CI images             |
++---------------------------+---------------------------------------------+
+| Build PROD images         | Builds all configured PROD images           |
 +---------------------------+---------------------------------------------+
 
-The images are stored in the `GitHub Registry <https://github.com/apache/airflow/packages>`_ and the
-names of those images follow the patterns described in
+The images are stored in the `GitHub Container Registry <https://github.com/orgs/apache/packages?repo_name=airflow>`_
+and the names of those images follow the patterns described in
 `Naming conventions for stored images <#naming-conventions-for-stored-images>`_
 
 Image building is configured in "fail-fast" mode. When any of the images
@@ -630,27 +546,25 @@ This workflow is a regular workflow that performs all checks of Airflow code.
 +===========================+==============================================+=======+=======+======+
 | Build info                | Prints detailed information about the build  | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Helm tests                | Runs tests for the Helm chart                | Yes   | Yes   | Yes  |
-+---------------------------+----------------------------------------------+-------+-------+------+
 | Test OpenAPI client gen   | Tests if OpenAPIClient continues to generate | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| CI Images                 | Waits for CI Images (3)                      | Yes   | Yes   | Yes  |
+| UI tests                  | React UI tests for new Airflow UI            | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Static checks             | Performs static checks                       | Yes   | Yes   | Yes  |
+| Test image building       | Tests if PROD image build examples work      | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Build docs                | Builds documentation                         | Yes   | Yes   | Yes  |
+| CI Images                 | Waits for and verifies CI Images (3)         | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Spell check docs          | Spell check for documentation                | Yes   | Yes   | Yes  |
+| (Basic) Static checks     | Performs static checks (full or basic)       | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Trigger tests             | Checks if tests should be triggered          | Yes   | Yes   | Yes  |
+| Build docs                | Builds documentation                         | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Tests [Pg/Msql/Sqlite]    | Run all the Pytest tests for Python code     | Yes(2)| Yes   | Yes  |
+| Tests                     | Run all the Pytest tests for Python code     | Yes(2)| Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Quarantined tests         | Flaky tests that we need to fix (5)          | Yes(2)| Yes   | Yes  |
+| Tests provider packages   | Tests if provider packages work              | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Upload coverage           | Uploads test coverage from all the tests     | Yes   | Yes   | Yes  |
+| Upload coverage           | Uploads test coverage from all the tests     | -     | Yes   | -    |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| PROD Images               | Waits for CI Images (3)                      | Yes   | Yes   | Yes  |
+| PROD Images               | Waits for and verifies PROD Images (3)       | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
 | Tests Kubernetes          | Run Kubernetes test                          | Yes(2)| Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
@@ -660,11 +574,6 @@ This workflow is a regular workflow that performs all checks of Airflow code.
 +---------------------------+----------------------------------------------+-------+-------+------+
 | Constraints               | Upgrade constraints to latest ones (4)       | -     | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Constraints push          | Pushes all upgraded constraints (4)          | -     | Yes   | Yes  |
-+---------------------------+----------------------------------------------+-------+-------+------+
-| Tag Repo nightly          | Tags the repository with nightly tag (6)     | -     | -     | Yes  |
-+---------------------------+----------------------------------------------+-------+-------+------+
-
 
 Comments:
 
@@ -673,31 +582,10 @@ Comments:
      for example "no-code" changes to build much faster)
  (3) The jobs wait for CI images if ``GITHUB_REGISTRY_WAIT_FOR_IMAGE`` variable is set to "true".
      You can set it to "false" to disable using shared images - this is slower though as the images
-     are rebuilt in every job that needs them. You can also set your own fork's secret
-     ``AIRFLOW_GITHUB_REGISTRY_WAIT_FOR_IMAGE`` to ``false`` to trigger the same behaviour.
- (4) PROD and CI images are pushed as "latest" to DockerHub registry and constraints are upgraded only if all
-     tests are successful. Note that images are not pushed in CRON jobs because they are rebuilt from
-     scratch and we want to push incremental changes to the DockerHub registry.
- (5) Flaky tests never fail in regular builds. See the next chapter where our approach to flaky tests
-     is explained.
- (6) Nightly tag is pushed to the repository only in CRON job and only if all tests pass. This
-     causes the DockerHub images are built automatically and made available to developers.
-
-Force sync main from apache/airflow
--------------------------------------
-
-This is manually triggered workflow (via GitHub UI manual run) that should only be run in GitHub forks.
-When triggered, it will force-push the "apache/airflow" main to the fork's main. It's the easiest
-way to sync your fork main to the Apache Airflow's one.
-
-Delete old artifacts
---------------------
-
-This workflow is introduced, to delete old artifacts from the GitHub Actions build. We set it to
-delete old artifacts that are > 7 days old. It only runs for the 'apache/airflow' repository.
-
-We also have a script that can help to clean-up the old artifacts:
-`remove_artifacts.sh <dev/remove_artifacts.sh>`_
+     are rebuilt in every job that needs them.
+ (4) PROD and CI images are pushed as "latest" to the GitHub Container Registry and constraints are upgraded
+     only if all tests are successful. Note that images are not pushed in CRON jobs because they are rebuilt
+     from scratch and we want to push incremental changes to the GitHub Container Registry.
 
 CodeQL scan
 -----------
@@ -713,7 +601,10 @@ Documentation from the ``main`` branch is automatically published on Amazon S3.
 To make this possible, GitHub Action has secrets set up with credentials
 for an Amazon Web Service account - ``DOCS_AWS_ACCESS_KEY_ID`` and ``DOCS_AWS_SECRET_ACCESS_KEY``.
 
-This account has permission to write/list/put objects to bucket ``apache-airflow-docs``. This bucket has public access configured, which means it is accessible through the website endpoint. For more information, see: `Hosting a static website on Amazon S3 <https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html>`_
+This account has permission to write/list/put objects to bucket ``apache-airflow-docs``. This bucket has
+public access configured, which means it is accessible through the website endpoint.
+For more information, see:
+`Hosting a static website on Amazon S3 <https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html>`_
 
 Website endpoint: http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/
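+
+A minimal sketch of what the upload amounts to (the local build directory is an assumption - the real
+publishing is done by the GitHub Action using the secrets above):
+
+.. code-block:: bash
+
+  # Assumes DOCS_AWS_ACCESS_KEY_ID / DOCS_AWS_SECRET_ACCESS_KEY are configured for the AWS CLI
+  aws s3 sync ./docs/_build/ s3://apache-airflow-docs/ --delete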
 
@@ -721,33 +612,52 @@ Naming conventions for stored images
 ====================================
 
 The images produced during the CI builds are stored in the
-`GitHub Registry <https://github.com/apache/airflow/packages>`_
+`GitHub Container Registry <https://github.com/orgs/apache/packages?repo_name=airflow>`_
 
 The images are stored both with the "latest" tag (for the last main push image that passes all the tests) as well
-with the tags indicating the origin of the image.
-
-The image names follow the patterns:
-
-+--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
-| Image        | Name pattern               | Tag for format                 | Comment                                                                                    |
-+==============+============================+================================+============================================================================================+
-| Python image | Python                     | <X.Y>-slim-buster-<COMMIT_SHA> | Base Python image used by both production and CI image.                                    |
-|              |                            |                                | Python maintainer release new versions of those image with security fixes every few weeks. |
-+--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
-| CI image     | <BRANCH>-python<X.Y>-ci    | <COMMIT_SHA>                   | CI image - this is the image used for most of the tests.                                   |
-+--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
-| PROD Build   | <BRANCH>-python<X.Y>-build | <COMMIT_SHA>                   | Production Build image - this is the "build" segment of production image.                  |
-| image        |                            |                                | It contains build-essentials and all necessary packages to install PIP packages.           |
-+--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
-| PROD image   | <BRANCH>-python<X.Y>       | <COMMIT_SHA>                   | Production image. This is the actual production image - optimized for size.                |
-|              |                            |                                | It contains only compiled libraries and minimal set of dependencies to run Airflow.        |
-+--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
-
-* <BRANCH> might be either "main" or "v1-10-test" or "v2-*-test"
-* <X.Y> - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8", "3.9"].
-* <COMMIT_SHA> - for images that get merged to "main", "v2-*-test" of "v1-10-test", or built as part of a
-  pull request the images are tagged with the (full lenght) commit SHA of that particular branch. For pull
-  requests the SHA used is the tip of the pull request branch.
+as with the COMMIT_SHA id for the images that were used in a particular build.
+
+The image names follow the patterns below. Except for the Python image, all the images are stored in
+https://ghcr.io/ in the ``apache`` organization.
+
+The packages are available under:
+
+``https://github.com/apache/airflow/pkgs/container/<CONTAINER_NAME>``
+
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+| Image        | Name:tag (both cases latest version and per-build)       | Description                                              |
++==============+==========================================================+==========================================================+
+| Python image | python:<X.Y>-slim-buster                                 | Base Python image used by both production and CI images. |
+| (DockerHub)  |                                                          | Python maintainers release new versions of those images  |
+|              |                                                          | with security fixes every few weeks in DockerHub.        |
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+| Airflow      | airflow-python-v2:<X.Y>-slim-buster                      | Version of python base image used in Airflow Builds      |
+| python base  | or                                                       | We keep the "latest" version there and also each build   |
+| image        | airflow-python-v2:<X.Y>-slim-buster-<COMMIT_SHA>         | has an associated specific python version that was used. |
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+| CI image     | airflow-<BRANCH>-python<X.Y>-ci-v2:latest                | CI image - this is the image used for most of the tests. |
+|              | or                                                       | Contains all provider dependencies and tools useful      |
+|              | airflow-<BRANCH>-python<X.Y>-ci-v2:<COMMIT_SHA>          | for testing. This image is used in Breeze.               |
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+| Manifest     | airflow-<BRANCH>-python<X.Y>-ci-v2-manifest:latest       | CI manifest image - this is the image used to optimize   |
+| CI image     | or                                                       | pulls and builds for Breeze development environment      |
+|              | airflow-<BRANCH>-python<X.Y>-ci-v2-manifest:<COMMIT_SHA> | They store a hash indicating whether the image will be   |
+|              |                                                          | faster to build or pull.                                 |
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+| PROD Build   | airflow-<BRANCH>-python<X.Y>-build-v2:latest             | Production Build image - this is the "build" segment of  |
+| image        | or                                                       | production image. It contains build-essentials and all   |
+|              | airflow-<BRANCH>-python<X.Y>-build-v2:<COMMIT_SHA>       | necessary packages to install PIP packages.              |
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+| PROD image   | airflow-<BRANCH>-python<X.Y>-v2:latest                   | Production image. This is the actual production image    |
+|              | or                                                       | optimized for size.                                      |
+|              | airflow-<BRANCH>-python<X.Y>-v2:<COMMIT_SHA>             | It contains only compiled libraries and minimal set of   |
+|              |                                                          | dependencies to run Airflow.                             |
++--------------+----------------------------------------------------------+----------------------------------------------------------+
+
+* <BRANCH> might be either "main" or "v2-*-test"
+* <X.Y> - Python version (Major + Minor). Should be one of ["3.6", "3.7", "3.8", "3.9"].
+* <COMMIT_SHA> - full-length SHA of the commit, either from the tip of the branch (for pushes/scheduled runs)
+  or from the tip of the branch used for the PR.
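+
+For example, following the conventions above, pulling the latest CI image for ``main`` and Python 3.8
+could look like this (a sketch - you may need ``docker login ghcr.io`` first):
+
+.. code-block:: bash
+
+  docker pull ghcr.io/apache/airflow-main-python3.8-ci-v2:latest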
 
 Reproducing CI Runs locally
 ===========================
@@ -770,7 +680,7 @@ cd27124534b46c9688a1d89e75fcd137ab5137e3, in python 3.8 environment you can run:
 
 .. code-block:: bash
 
-  ./breeze --github-image-id cd27124534b46c9688a1d89e75fcd137ab5137e3 --use=github-registry --python 3.8
+  ./breeze --github-image-id cd27124534b46c9688a1d89e75fcd137ab5137e3 --python 3.8
 
 You will be dropped into a shell with the exact version that was used during the CI run and you will
 be able to run pytest tests manually, easily reproducing the environment that was used in CI. Note that in
@@ -809,12 +719,12 @@ Scheduled build flow
 Adding new Python versions to CI
 --------------------------------
 
-In 2.0 line we currently support Python 3.6, 3.7, 3.8.
+In the 2.0 line we currently support Python 3.6, 3.7, 3.8 and 3.9.
 
-In order to add a new version the following operations should be done (example uses Python 3.9)
+In order to add a new version, the following operations should be done (the example uses Python 3.10):
 
 * copy the latest constraints in ``constraints-main`` branch from previous versions and name it
-  using the new Python version (``constraints-3.9.txt``). Commit and push
+  using the new Python version (``constraints-3.10.txt``). Commit and push (see the sketch after this list)
 
 * add the new Python version to `breeze-complete <breeze-complete>`_ and
   `_initialization.sh <scripts/ci/libraries/_initialization.sh>`_ - tests will fail if they are not
@@ -824,30 +734,14 @@ In order to add a new version the following operations should be done (example u
 
 .. code-block:: bash
 
-  ./breeze build-image --python 3.9
+  ./breeze build-image --python 3.10
 
-* push image as cache to DockerHub and both registries:
+* push image as cache to GitHub:
 
 .. code-block:: bash
 
-  ./breeze push-image --python 3.9
-  ./breeze push-image --python 3.9 --use-github-registry
+  ./breeze push-image --python 3.10
 
-* Find the 3 new images (main, ci, build) created in
+* Find the 4 new images (main, ci, build, ci-manifest) created in
   `GitHub Container registry <https://github.com/orgs/apache/packages?tab=packages&ecosystem=container&q=airflow>`_
-  go to Package Settings and turn on ``Public Visibility`` and add ``airflow-committers``
-  group as ``Admin Role`` to all of them.
-
-* In `DockerHub <https://hub.docker.com/repository/docker/apache/airflow/builds/edit>`_  create three entries
-  for automatically built nightly-tag and release images:
-
-
-+-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
-| Source type | Source         | Docker Tag            | Dockerfile location | Build Context | Autobuild | Build caching | Comment                                                                |
-+=============+================+=======================+=====================+===============+===========+===============+========================================================================+
-| Tag         | nightly-main   | main-python3.9        | Dockerfile          | /             | x         | -             | Nightly CI/PROD images from successful scheduled main nightly builds   |
-+-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
-| Branch      | v2-*-stable    | v2-*-stable-python3.9 | Dockerfile          | /             | x         |               | CI/PROD images automatically built pushed stable branch                |
-+-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
-| Tag         | /^([1-2].*)$/  | {\1}-python3.9        | Dockerfile          | /             | x         |               | CI/PROD images automatically built from pushed release tags            |
-+-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+
+  go to Package Settings, turn on ``Public Visibility`` and set the "Inherit access from Repository" flag.
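+
+A minimal sketch (branch and file names as described in the first step of the list above) of preparing
+the constraints for the new Python version:
+
+.. code-block:: bash
+
+  git checkout constraints-main
+  # Seed the new constraints from the latest existing ones (starting from 3.9 here is an assumption)
+  cp constraints-3.9.txt constraints-3.10.txt
+  git add constraints-3.10.txt
+  git commit -m "Add constraints for Python 3.10"
+  git push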
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 3cb96ee..98cdf93 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -420,15 +420,11 @@ against main is done to ``v2-*-test`` branches, but PRs from contributors toward
 The ``v2-*-test`` branches and ``v2-*-stable`` ones are merged just before the release and that's the
 time when they converge.
 
-The production images are build in DockerHub from:
+The production images are released to DockerHub from:
 
 * main branch for development
-* v2-*-test branches for testing 2.*.x release
 * ``2.*.*``, ``2.*.*rc*`` releases from the ``v2-*-stable`` branch when we prepare release candidates and
-  final releases. There are no production images prepared from v2-*-stable branch.
-
-Similar rules apply to ``1.10.x`` releases until June 2021. We have ``v1-10-test`` and ``v1-10-stable``
-branches there.
+  final releases.
 
 Development Environments
 ========================
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index 40fd98e..d5a3551 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -304,8 +304,6 @@ Using Breeze
    Docker image:           apache/airflow:main-python3.8-ci
    Airflow source version: 2.0.0b2
    Python version:         3.8
-   DockerHub user:         apache
-   DockerHub repo:         airflow
    Backend:                mysql 5.7
 
 
diff --git a/IMAGES.rst b/IMAGES.rst
index 47ce39f..82e6989 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -59,43 +59,6 @@ so that any changes in setup.py do not trigger reinstalling of all dependencies.
 There is a second step of installation that re-installs the dependencies
 from the latest sources so that we are sure that latest dependencies are installed.
 
-Image naming conventions
-========================
-
-The images are named as follows:
-
-``apache/airflow-ci:<BRANCH>-python<PYTHON_MAJOR_MINOR_VERSION>[-ci][-manifest]``
-
-For production images tagged with official releases:
-
-``apache/airflow:<TAG>-python<PYTHON_MAJOR_MINOR_VERSION>``
-
-And for production images with ``latest`` tag:
-
-````apache/airflow:latest[-python<PYTHON_MAJOR_MINOR_VERSION>]``
-
-where:
-
-* ``BRANCH_OR_TAG`` - branch or tag used when creating the image. Examples: ``main``,
-  ``v2-1-test``, ``2.1.0``. The ``main``, ``v2-*-test`` labels are
-  built from branches so they change over time. The ``2.*.*`` labels are built from git tags
-  and they are "fixed" once built.
-* ``PYTHON_MAJOR_MINOR_VERSION`` - version of Python used to build the image. Examples: ``3.6``, ``3.7``,
-  ``3.8``, ``3.9``
-* The ``-ci`` suffix is added for CI images
-* The ``-manifest`` is added for manifest images (see below for explanation of manifest images)
-
-We also store (to increase speed of local build/pulls) Python images that were used to build
-the CI images. Each CI image, when built uses current Python version of the base images. Those
-python images are regularly updated (with bugfixes/security fixes), so for example Python 3.8 from
-last week might be a different image than Python 3.8 today. Therefore whenever we push CI image
-to airflow repository, we also push the Python image that was used to build it this image is stored
-as ``apache/airflow-ci:python<PYTHON_MAJOR_MINOR_VERSION>-<BRANCH_OR_TAG>``.
-
-Since those are simply snapshots of the existing Python images, DockerHub does not create a separate
-copy of those images - all layers are mounted from the original Python images and those are merely
-labels pointing to those.
-
 Building docker images from current sources
 ===========================================
 
@@ -214,8 +177,8 @@ In this case you airflow and all packages (.whl files) should be placed in ``doc
 Using cache during builds
 =========================
 
-Default mechanism used in Breeze for building CI images uses images pulled from DockerHub or
-GitHub Image Registry. This is done to speed up local builds and CI builds - instead of 15 minutes
+The default mechanism used in Breeze for building CI images uses images pulled from the
+GitHub Container Registry. This is done to speed up local builds and CI builds - instead of 15 minutes
 for rebuild of CI images, it takes usually less than 3 minutes when cache is used. For CI builds this is
 usually the best strategy - to use default "pull" cache. This is default strategy when
 `<BREEZE.rst>`_ builds are performed.
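+
+A minimal sketch (assuming ``pulled`` is accepted alongside the ``local`` and ``disabled`` values shown
+below) of an explicit pull-cache build:
+
+.. code-block:: bash
+
+  export DOCKER_CACHE="pulled"
+  ./breeze build-image --python 3.8
+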
@@ -265,61 +228,12 @@ or
 
   export DOCKER_CACHE="disabled"
 
+Naming conventions
+==================
 
-Choosing image registry
-=======================
-
-By default images are pulled and pushed from and to DockerHub registry when you use Breeze's push-image
+By default, images are pulled from and pushed to the GitHub Container Registry when you use Breeze's push-image
 or build commands.
 
-But as described in `CI Documentation <CI.rst>`_, you can choose GitHub Container Registry.
-
-Naming convention for DockerHub images.
-
-Images used during CI builds:
-
-.. code-block:: bash
-
-  apache/airflow-ci:<BRANCH>-pythonX.Y         - for production images
-  apache/airflow-ci:<BRANCH>-pythonX.Y-ci      - for CI images
-  apache/airflow-ci:<BRANCH>-pythonX.Y-build   - for production build stage
-  apache/airflow-ci:pythonX.Y-<BRANCH>         - for Python base image used for both CI and PROD image
-
-For example:
-
-.. code-block:: bash
-
-  apache/airflow-ci:main-python3.6                - production "main" image from current main
-  apache/airflow-ci:main-python3.6-ci             - CI "main" image from current main
-  apache/airflow-ci:v2-1-test-python3.6-ci          - CI "main" image from current v2-1-test branch
-  apache/airflow:python3.6-main                - base Python image for the main branch
-
-You can see those CI DockerHub images at `<https://hub.docker.com/r/apache/airflow-ci>`_
-
-Released, production images
-
-.. code-block:: bash
-
-  apache/airflow:<TAG>-pythonX.Y         - for tagged released production images
-  apache/airflow:<TAG>                   - for default Python version released production images
-  apache/airflow:latest-pythonX.Y        - for latest released production images
-  apache/airflow:latest                  - for default Python version of latest released production images
-
-For example:
-
-.. code-block:: bash
-
-  apache/airflow:2.1.0-python3.8         - for regular released 2.1.0 production image with Python 3.8
-  apache/airflow:2.1.0                   - for default Python version of 2.1.0 production image
-  apache/airflow:latest-python3.8        - for latest released Python 3.8 production image
-  apache/airflow:latest                  - for latest released default Python production image
-
-You can see those CI DockerHub images at `<https://hub.docker.com/r/apache/airflow>`_
-
-
-Using GitHub Container Registry as build cache
-----------------------------------------------
-
 We are using GitHub Container Registry as build cache. The images are all in the organization-wide "apache/"
 namespace. We are adding "airflow-" as a prefix for the image names of all Airflow images.
 The images are linked to the repository via ``org.opencontainers.image.source`` label in the image.
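+
+A minimal sketch (the image name is only an example) of checking that link for a pulled image:
+
+.. code-block:: bash
+
+  docker inspect --format '{{ index .Config.Labels "org.opencontainers.image.source" }}' \
+      ghcr.io/apache/airflow-main-python3.8-ci-v2:latest
+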
@@ -346,18 +260,12 @@ Latest images (pushed when main merge succeeds):
   ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-build-v2:latest - for production build stage
   ghcr.io/apache/airflow-python-v2:X.Y-slim-buster          - for base Python images
 
-Note that we never push or pull "release" images to GitHub registry. Those are only pushed to DockerHub.
 You can see all the current GitHub images at `<https://github.com/apache/airflow/packages>`_
 
-In order to interact with the GitHub Container Registry you need to add ``--use-github-registry``
-flag to the pull/push commands in Breeze. This way the images will be pulled/pushed from/to GitHub
-rather than from/to DockerHub. Images are build locally as ``apache/airflow`` images but then they are
-tagged with the right GitHub tags for you automatically.
-
-You can read more about the CI configuration and how CI builds are using DockerHub/GitHub images
+You can read more about the CI configuration and how CI builds are using GitHub images
 in `<CI.rst>`_.
 
-Note that you need to be committer and have the right to push to DockerHub and GitHub and you need to
+Note that you need to be a committer and have the right to push to GitHub, and you need to
 be logged in to the registry. Only committers can push images directly. You need to login with your
 Personal Access Token with "packages" write scope to be able to push to those repositories or pull from them
 in case of GitHub Packages.
@@ -368,7 +276,6 @@ GitHub Container Registry
 
   docker login ghcr.io
 
-
 Since there are different naming conventions used for Airflow images and there are multiple images used,
 `Breeze <BREEZE.rst>`_ provides easy to use management interface for the images. The
 `CI system of ours <CI.rst>`_ is designed in the way that it should automatically refresh caches, rebuild
@@ -383,10 +290,10 @@ Force building Python 3.6 CI image using local cache and pushing it container re
 
 .. code-block:: bash
 
-  ./breeze build-image --python 3.6 --force-build-images --force-pull-base-python-image --build-cache-local
-  ./breeze push-image --python 3.6 --use-github-registry
+  ./breeze build-image --python 3.6 --force-build-images --check-if-base-python-image-updated --build-cache-local
+  ./breeze push-image --python 3.6
 
-Building Python 3.8 CI image using cache pulled from DockerHub and pushing it back:
+Building Python 3.8 CI image using cache pulled from GitHub Container Registry and pushing it back:
 
 .. code-block:: bash
 
@@ -404,7 +311,6 @@ For example this command will run the same Python 3.8 image as was used in build
 .. code-block:: bash
 
   ./breeze --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e \
-    --use-github-registry \
     --python 3.8 --integration kerberos
 
 You can see more details and examples in `Breeze <BREEZE.rst>`_
@@ -647,13 +553,13 @@ CI Image manifests
 Together with the main CI images we also build and push image manifests. Those manifests are very small images
 that contain only content of randomly generated file at the 'crucial' part of the CI image building.
 This is in order to be able to determine very quickly if the image in the docker registry has changed a
-lot since the last time. Unfortunately docker registry (specifically DockerHub registry) has no anonymous
+lot since the last time. Unfortunately, the docker registry has no anonymous
 way of querying image details via API. You really need to download the image to inspect it.
 We workaround it in the way that always when we build the image we build a very small image manifest
 containing randomly generated UUID and push it to registry together with the main CI image.
 The tag for the manifest image reflects the image it refers to with added ``-manifest`` suffix.
-The manifest image for ``apache/airflow:main-python3.6-ci`` is named
-``apache/airflow:main-python3.6-ci-manifest``.
+The manifest image for ``ghcr.io/apache/airflow-main-python3.6-ci-v2`` is named
+``ghcr.io/apache/airflow-main-python3.6-ci-v2-manifest``.
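+
+A minimal sketch (manifest image name taken from the example above) of pulling such a manifest image by hand:
+
+.. code-block:: bash
+
+  docker pull ghcr.io/apache/airflow-main-python3.6-ci-v2-manifest:latest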
 
 The image is quickly pulled (it is really, really small) when important files change and the content
 of the randomly generated UUID is compared with the one in our image. If the contents are different
@@ -669,9 +575,8 @@ Working with the images
 Pulling the Latest Images
 -------------------------
 
-Sometimes the image needs to be refreshed from the registry in DockerHub - because you have an outdated
-version. You can do it via the ``--force-pull-images`` flag to force pulling the latest images from the
-DockerHub.
+Sometimes the image needs to be refreshed from the GitHub Container Registry - because you have an outdated
+version. You can do it via the ``--force-pull-images`` flag to force pulling the latest images.
 
 For production image:
 
@@ -692,14 +597,12 @@ Refreshing Base Python images
 Python base images are updated from time-to-time, usually as a result of implementing security fixes.
 When you build your image locally using ``docker build`` you use the version of image that you have locally.
 For the CI builds using ``breeze`` we use the image that is stored in our repository in order to use cache
-efficiently. However we can refresh the image to latest available by specifying
-``--force-pull-base-python-image`` and running it manually (you need to have access to DockerHub and our
-GitHub Registies in order to be able to do that.
+efficiently. However, CI push builds have the ``CHECK_IF_BASE_PYTHON_IMAGE_UPDATED`` variable set to ``true``,
+which checks if a new base Python image has been released and will pull it and rebuild the image if needed.
 
 .. code-block:: bash
 
     #!/bin/bash
-    export DOCKERHUB_USER="apache"
     export GITHUB_REPOSITORY="apache/airflow"
     export FORCE_ANSWER_TO_QUESTIONS="true"
     export CI="true"
@@ -707,13 +610,11 @@ GitHub Registies in order to be able to do that.
     for python_version in "3.6" "3.7" "3.8"
     do
             ./breeze build-image --python ${python_version} --build-cache-local \
-                    --force-pull-base-python-image --verbose
+                    --check-if-base-python-image-updated --verbose
             ./breeze build-image --python ${python_version} --build-cache-local \
                     --production-image --verbose
             ./breeze push-image
-            ./breeze push-image --use-github-registry
             ./breeze push-image --production-image
-            ./breeze push-image --production-image --use-github-registry
     done
 
 Running the CI image
diff --git a/README.md b/README.md
index b6614c2..9798136 100644
--- a/README.md
+++ b/README.md
@@ -50,6 +50,7 @@ Use Airflow to author workflows as directed acyclic graphs (DAGs) of tasks. The
 - [Official source code](#official-source-code)
 - [Convenience packages](#convenience-packages)
 - [User Interface](#user-interface)
+- [Support for Python and Kubernetes versions](#support-for-python-and-kubernetes-versions-1)
 - [Contributing](#contributing)
 - [Who uses Apache Airflow?](#who-uses-apache-airflow)
 - [Who Maintains Apache Airflow?](#who-maintains-apache-airflow)
@@ -243,6 +244,46 @@ following the ASF Policy.
 
   ![Code View](/docs/apache-airflow/img/code.png)
 
+| Version | Current Patch/Minor | State     | First Release | Limited Support | EOL/Terminated |
+|---------|---------------------|-----------|---------------|-----------------|----------------|
+| 2       | 2.1.1               | Supported | Dec 17, 2020  | Dec 2021        | TBD            |
+| 1.10    | 1.10.15             | EOL       | Aug 27, 2018  | Dec 17, 2020    | June 17, 2021  |
+| 1.9     | 1.9.0               | EOL       | Jan 03, 2018  | Aug 27, 2018    | Aug 27, 2018   |
+| 1.8     | 1.8.2               | EOL       | Mar 19, 2017  | Jan 03, 2018    | Jan 03, 2018   |
+| 1.7     | 1.7.1.2             | EOL       | Mar 28, 2016  | Mar 19, 2017    | Mar 19, 2017   |
+
+Limited support versions will be supported with security and critical bug fixes only.
+EOL versions will not get any fixes or support.
+We always recommend that all users run the latest available minor release for whatever major version is in use.
+We **highly** recommend upgrading to the latest Airflow major release at the earliest convenient time and before the EOL date.
+
+## Support for Python and Kubernetes versions
+
+As of Airflow 2.0 we agreed to certain rules we follow for Python and Kubernetes support.
+They are based on the official release schedule of Python and Kubernetes, nicely summarized in the
+[Python Developer's Guide](https://devguide.python.org/#status-of-python-branches) and
+[Kubernetes version skew policy](https://kubernetes.io/docs/setup/release/version-skew-policy/).
+
+1. We drop support for Python and Kubernetes versions when they reach EOL. We drop support for those
+   EOL versions in main right after the EOL date, and it is effectively removed when we release the
+   first new MINOR (or MAJOR if there is no new MINOR version) of Airflow.
+   For example, for Python 3.6 it means that we drop support in main right after 23.12.2021, and the first
+   MAJOR or MINOR version of Airflow released after that will not have it.
+
+2. The "oldest" supported version of Python/Kubernetes is the default one. "Default" is only meaningful
+   in terms of "smoke tests" in CI PRs which are run using this default version and default reference
+   image available. Currently, ``apache/airflow:latest`` and ``apache/airflow:2.1.1`` images
+   are both Python 3.6 images; however, the first MINOR/MAJOR release of Airflow after 23.12.2021 will
+   become a Python 3.7 image.
+
+3. We support a new version of Python/Kubernetes in main after they are officially released. As soon as we
+   make them work in our CI pipeline (which might not be immediate, mostly due to dependencies catching up
+   with new versions of Python), we release new images/support in Airflow based on the working CI setup.
+
+### Additional notes on Python version requirements
+
+* Previous version [requires](https://github.com/apache/airflow/issues/8162) at least Python 3.5.3
+  when using Python 3
 
 ## Contributing
 
diff --git a/TESTING.rst b/TESTING.rst
index 6cb76c6..53c371d 100644
--- a/TESTING.rst
+++ b/TESTING.rst
@@ -717,8 +717,6 @@ The typical session for tests with Kubernetes looks like follows:
 
        Airflow source version:  2.0.0.dev0
        Python version:          3.7
-       DockerHub user:          apache
-       DockerHub repo:          airflow
        Backend:                 postgres 9.6
 
     No kind clusters found.
@@ -759,8 +757,6 @@ The typical session for tests with Kubernetes looks like follows:
 
        Airflow source version:  2.0.0.dev0
        Python version:          3.7
-       DockerHub user:          apache
-       DockerHub repo:          airflow
        Backend:                 postgres 9.6
 
     airflow-python-3.7-v1.17.0-control-plane
diff --git a/breeze b/breeze
index 6240d07..3b09208 100755
--- a/breeze
+++ b/breeze
@@ -120,7 +120,7 @@ function breeze::setup_default_breeze_constants() {
 
     # By default we do not pull python base image. We should do that only when we run upgrade check in
     # CI main and when we manually refresh the images to latest versions
-    export FORCE_PULL_BASE_PYTHON_IMAGE="false"
+    export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="false"
 
     # Forward common host credentials to docker (gcloud, aws etc.).
     export FORWARD_CREDENTIALS="false"
@@ -416,8 +416,6 @@ EOF
 #    AIRFLOW_CI_IMAGE
 #    AIRFLOW_PROD_IMAGE
 #    AIRFLOW_VERSION
-#    DOCKERHUB_USER
-#    DOCKERHUB_REPO
 #    INSTALL_AIRFLOW_VERSION
 #    INSTALL_AIRFLOW_REFERENCE
 #
@@ -483,7 +481,6 @@ EOF
 
                                Branch name:            ${BRANCH_NAME}
                                Docker image:           ${AIRFLOW_PROD_IMAGE}
-                               GitHub cache prefix:    ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
                                Airflow source version: $(build_images::get_airflow_version_from_production_image)
 EOF
         else
@@ -493,14 +490,11 @@ EOF
 
                                Branch name:            ${BRANCH_NAME}
                                Docker image:           ${AIRFLOW_CI_IMAGE}
-                               GitHub cache prefix:    ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
                                Airflow source version: ${AIRFLOW_VERSION}
 EOF
         fi
         cat <<EOF
                                Python version:         ${PYTHON_MAJOR_MINOR_VERSION}
-                               DockerHub user:         ${DOCKERHUB_USER}
-                               DockerHub repo:         ${DOCKERHUB_REPO}
                                Backend:                ${BACKEND} ${backend_version}
 EOF
         if [[ -n ${USE_AIRFLOW_VERSION=} ]]; then
@@ -518,7 +512,6 @@ EOF
 
    Branch name:             ${BRANCH_NAME}
    Docker image:            ${AIRFLOW_PROD_IMAGE}
-   GitHub cache prefix:     ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
 EOF
         else
             cat <<EOF
@@ -527,7 +520,6 @@ EOF
 
    Branch name:             ${BRANCH_NAME}
    Docker image:            ${AIRFLOW_CI_IMAGE}
-   GitHub cache prefix:     ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
 
 EOF
         fi
@@ -541,8 +533,6 @@ EOF
 
    Airflow source version:  ${AIRFLOW_VERSION}
    Python version:          ${PYTHON_MAJOR_MINOR_VERSION}
-   DockerHub user:          ${DOCKERHUB_USER}
-   DockerHub repo:          ${DOCKERHUB_REPO}
    Backend:                 ${BACKEND} ${backend_version}
 EOF
     fi
@@ -567,8 +557,6 @@ EOF
 # Used Globals:
 #   BRANCH_NAME
 #   PYTHON_MAJOR_MINOR_VERSION
-#   DOCKERHUB_USER
-#   DOCKERHUB_REPO
 #   BACKEND
 #   AIRFLOW_VERSION
 #   INSTALL_AIRFLOW_VERSION
@@ -600,7 +588,6 @@ function breeze::prepare_command_file() {
     local file="${1}"
     local command="${2}"
     local compose_file="${3}"
-    local airflow_image="${4}"
     cat <<EOF >"${file}"
 #!/usr/bin/env bash
 if [[ \${VERBOSE} == "true" ]]; then
@@ -612,8 +599,6 @@ if [[ \${VERBOSE} == "true" ]]; then
   set -x
 fi
 cd "\$( dirname "\${BASH_SOURCE[0]}" )" || exit
-export DOCKERHUB_USER=${DOCKERHUB_USER}
-export DOCKERHUB_REPO=${DOCKERHUB_REPO}
 export HOST_USER_ID=${HOST_USER_ID}
 export HOST_GROUP_ID=${HOST_GROUP_ID}
 export COMPOSE_FILE="${compose_file}"
@@ -634,7 +619,6 @@ export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}"
 export AIRFLOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}"
 export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE_KUBERNETES}"
 export AIRFLOW_PROD_BASE_TAG="${AIRFLOW_PROD_BASE_TAG}"
-export AIRFLOW_IMAGE="${airflow_image}"
 export SQLITE_URL="${SQLITE_URL}"
 export USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION}"
 export USE_PACKAGES_FROM_DIST="${USE_PACKAGES_FROM_DIST}"
@@ -646,51 +630,38 @@ EOF
 
 #######################################################################################################
 #
-# Prepare all command files that we are using. Depending on the command to execute we use two
-# convenience scripts:
-#
-#    dc_ci - to run docker compose command for CI image
-#    dc_prod - to run docker compose command for PROD image
+# Prepare all command files that we are using. We use dc_ci to run docker compose commands for the CI image
 #
 # Global constants set:
 #
 #     PYTHON_BASE_IMAGE_VERSION
 #     PYTHON_BASE_IMAGE
 #     AIRFLOW_CI_IMAGE
-#     AIRFLOW_PROD_BASE_TAG
-#     AIRFLOW_PROD_IMAGE
-#     AIRFLOW_PROD_IMAGE_KUBERNETES
 #     BUILT_CI_IMAGE_FLAG_FILE
 #
 #######################################################################################################
 function breeze::prepare_command_files() {
     local main_ci_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/base.yml
-    local main_prod_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/base.yml
     local backend_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml
     local backend_port_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}-port.yml
     local local_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/local.yml
     local local_all_sources_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/local-all-sources.yml
     local files_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/files.yml
-    local local_prod_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/local-prod.yml
     local remove_sources_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml
     local forward_credentials_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml
 
     local compose_ci_file=${main_ci_docker_compose_file}:${backend_docker_compose_file}:${files_docker_compose_file}
-    local compose_prod_file=${main_prod_docker_compose_file}:${backend_docker_compose_file}:${files_docker_compose_file}
 
     if [[ "${MOUNT_SELECTED_LOCAL_SOURCES}" != "false" ]]; then
         compose_ci_file=${compose_ci_file}:${local_docker_compose_file}:${backend_port_docker_compose_file}
-        compose_prod_file=${compose_prod_file}:${local_prod_docker_compose_file}:${backend_port_docker_compose_file}
     fi
 
     if [[ "${MOUNT_ALL_LOCAL_SOURCES}" != "false" ]]; then
         compose_ci_file=${compose_ci_file}:${local_all_sources_docker_compose_file}:${backend_port_docker_compose_file}
-        compose_prod_file=${compose_prod_file}:${local_all_sources_docker_compose_file}:${backend_port_docker_compose_file}
     fi
 
     if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
         compose_ci_file=${compose_ci_file}:${forward_credentials_docker_compose_file}
-        compose_prod_file=${compose_prod_file}:${forward_credentials_docker_compose_file}
     fi
 
     if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE} || -n ${USE_AIRFLOW_VERSION=} ]]; then
@@ -711,16 +682,8 @@ function breeze::prepare_command_files() {
     export DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI="dc_ci"
     readonly DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI
 
-    export DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD="dc_prod"
-    readonly DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD
-
-    # Prepare script for "run docker compose CI command"
     breeze::prepare_command_file "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" \
-        "\"\${@}\"" "${compose_ci_file}" "${AIRFLOW_CI_IMAGE}"
-
-    # Prepare script for "run docker compose PROD command"
-    breeze::prepare_command_file "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD}" \
-        "\"\${@}\"" "${compose_prod_file}" "${AIRFLOW_PROD_IMAGE}"
+        "\"\${@}\"" "${compose_ci_file}"
 }
 
 #######################################################################################################
@@ -999,10 +962,10 @@ function breeze::parse_arguments() {
             export FORCE_ANSWER_TO_QUESTIONS="yes"
             shift
             ;;
-        --force-pull-base-python-image)
-            echo "Force pulling base python image. Uses pulled images as cache."
+        --check-if-base-python-image-updated)
+            echo "Checks if the base python image has been updated."
             echo
-            export FORCE_PULL_BASE_PYTHON_IMAGE="true"
+            export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="true"
             export FORCE_BUILD_IMAGES="true"
             # if you want to force  build an image - assume you want to build it :)
             export FORCE_ANSWER_TO_QUESTIONS="yes"
@@ -1118,38 +1081,18 @@ function breeze::parse_arguments() {
             echo "Tag to add to the image: ${IMAGE_TAG}"
             shift 2
             ;;
-        -D | --dockerhub-user)
-            export DOCKERHUB_USER="${2}"
-            echo "Dockerhub user ${DOCKERHUB_USER}"
-            echo
-            shift 2
-            ;;
-        -R | --dockerhub-repo)
-            export DOCKERHUB_REPO="${2}"
-            echo "Dockerhub repo ${DOCKERHUB_REPO}"
-            echo
-            shift 2
-            ;;
         -f | --forward-credentials)
             echo "Forwarding credentials. Be careful as your credentials are available in the container!"
             echo
             export FORWARD_CREDENTIALS="true"
             shift
             ;;
-        -c | --use-github-registry)
-            echo
-            echo "Use GitHub registry"
-            echo
-            export USE_GITHUB_REGISTRY="true"
-            shift
-            ;;
         -g | --github-repository)
             echo
             echo "Using GitHub registry."
             echo "GitHub repository: ${2}"
             echo
             export GITHUB_REPOSITORY="${2}"
-            export USE_GITHUB_REGISTRY="true"
             shift 2
             ;;
         -s | --github-image-id)
@@ -1162,7 +1105,6 @@ function breeze::parse_arguments() {
             echo "behaviour as in the CI environment."
             echo
             export FORCE_PULL_IMAGES="true"
-            export USE_GITHUB_REGISTRY="true"
             export GITHUB_REGISTRY_PULL_IMAGE_TAG="${2}"
             export GITHUB_REGISTRY_PUSH_IMAGE_TAG="${2}"
             export CHECK_IMAGE_FOR_REBUILD="false"
@@ -1718,13 +1660,12 @@ ${CMDNAME} shell [FLAGS] [-- <EXTRA_ARGS>]
       '${CMDNAME} shell -- -c \"ls -la\"'
       '${CMDNAME} -- -c \"ls -la\"'
 
-      For DockerHub pull: --dockerhub-user and --dockerhub-repo flags can be used to specify
-      the repository to pull from. For GitHub repository, the --github-repository
-      flag can be used for the same purpose. You can also use --github-image-id <COMMIT_SHA> in case
+      For GitHub repository, the --github-repository flag can be used to specify the repository
+      to pull and push images. You can also use --github-image-id <COMMIT_SHA> in case
       you want to pull the image with specific COMMIT_SHA tag.
 
       '${CMDNAME} shell \\
-            --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
+            --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
       '${CMDNAME} \\
             --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
 
@@ -1764,14 +1705,16 @@ ${CMDNAME} build-image [FLAGS]
         '--build-cache-local', '--build-cache-pulled', or '--build-cache-disabled'
 
       Choosing whether to force pull images or force build the image:
-          '--force-build-image',
-           '--force-pull-image', '--force-pull-base-python-image'
+          '--force-build-image', '--force-pull-image'
+
+      Checking if the base python image has been updated:
+          '--check-if-base-python-image-updated'
 
       You can also pass '--production-image' flag to build production image rather than CI image.
 
-      For DockerHub pulling of base images: '--dockerhub-user' and '--dockerhub-repo' flags can be
-      used to specify the repository to pull from. For GitHub repository, the '--github-repository'
-      flag can be used for the same purpose.
+      For GitHub repository, the '--github-repository' flag can be used to choose the
+      repository to pull/push images from.
+
 Flags:
 $(breeze::flag_airflow_variants)
 $(breeze::flag_build_different_airflow_version)
@@ -1938,24 +1881,17 @@ $(breeze::flag_verbosity)
     export DETAILED_USAGE_PUSH_IMAGE="
 ${CMDNAME} push_image [FLAGS]
 
-      Pushes images to docker registry. You can push the images to DockerHub registry (default)
-      or to the GitHub registry (if --use-github-registry flag is used).
-
-      For DockerHub pushes: --dockerhub-user and --dockerhub-repo flags can be used to specify
-      the repository to push to. For GitHub repository, the --github-repository
-      flag can be used for the same purpose. You can also add
-      --github-image-id <COMMIT_SHA> in case you want to push image with specific
-      SHA tag. In case you specify --github-repository or --github-image-id, you
-      do not need to specify --use-github-registry flag.
+      Pushes images to the GitHub registry.
 
+      You can add --github-repository to push to a different repository/organisation.
+      You can add --github-image-id <COMMIT_SHA> in case you want to push the image with a
+      specific SHA tag.
       You can also add the --production-image flag to switch to the production image (default is the CI one).
 
       Examples:
 
       '${CMDNAME} push-image' or
-      '${CMDNAME} push-image --dockerhub-user user' to push to your private registry or
       '${CMDNAME} push-image --production-image' - to push production image or
-      '${CMDNAME} push-image --use-github-registry' - to push to GitHub image registry or
       '${CMDNAME} push-image \\
             --github-repository user/airflow' - to push to your user's fork
       '${CMDNAME} push-image \\
@@ -2644,16 +2580,15 @@ function breeze::flag_build_docker_images() {
         package-related files, but you can force it using this flag.
 
 -P, --force-pull-images
-        Forces pulling of images from DockerHub before building to populate cache. The
-        images are pulled by default only for the first time you run the
+        Forces pulling of images from GitHub Container Registry before building to populate cache.
+        The images are pulled by default only for the first time you run the
         environment, later the locally built images are used as cache.
 
---force-pull-base-python-image
-        Forces pulling of Python base image from DockerHub before building to
-        populate cache. This should only be run in case we need to update to latest available
-        Python base image. This should be a rare and manually triggered event. Also this flag
-        is used in the scheduled run in CI when we rebuild all the images from the scratch
-        and run the tests to see if the latest python images do not fail our tests.
+--check-if-base-python-image-updated
+        Checks if the Python base image from DockerHub has been updated vs the current Python base
+        image we store in GitHub Container Registry. Python images are updated regularly with
+        security fixes; this switch checks if a new one has been released and, if so, pulls it and
+        prepares a new base Python image based on the latest one.
 
 --cleanup-docker-context-files
         Removes whl and tar.gz files created in docker-context-files before running the command.
@@ -2762,14 +2697,10 @@ Build options:
         This is default strategy used by the Production image builds.
 
 -U, --build-cache-pulled
-        Uses images pulled from registry (either DockerHub or GitHub depending on
-        --use-github-registry flag) to build images. The pulled images will be used as cache.
+        Uses images pulled from GitHub Container Registry to build images.
         Those builds are usually faster than with ''--build-cache-local'', except when
-        the registry images are not yet updated. The DockerHub images are updated nightly and the
-        GitHub images are updated after merges to main so it might be that the images are still
-        outdated vs. the latest version of the Dockerfiles you are using. In this case, the
-        ''--build-cache-local'' might be faster, especially if you iterate and change the
-        Dockerfiles yourself.
+        the registry images are not yet updated. The images are updated after successful merges
+        to main.
 
         This is default strategy used by the CI image builds.
 
@@ -2788,8 +2719,6 @@ Build options:
 # Prints flags controlling docker pull and push process
 #
 # Global constants used:
-#       _breeze_default_dockerhub_user
-#       _breeze_default_dockerhub_repo
 #       _breeze_default_github_repository
 #       _breeze_default_github_image_id
 #
@@ -2799,23 +2728,10 @@ Build options:
 function breeze::flag_pull_push_docker_images() {
     local show_sha="${1-'show_sha'}"
     echo "
--D, --dockerhub-user DOCKERHUB_USER
-        DockerHub user used to pull, push and build images. Default: ${_breeze_default_dockerhub_user:=}.
-
--H, --dockerhub-repo DOCKERHUB_REPO
-        DockerHub repository used to pull, push, build images. Default: ${_breeze_default_dockerhub_repo:=}.
-
--c, --use-github-registry
-        If GitHub registry is enabled, pulls and pushes are done from the GitHub registry not
-        DockerHub. You need to be logged in to the registry in order to be able to pull/push from
-        and you need to be committer to push to Apache Airflow' GitHub registry.
-
 -g, --github-repository GITHUB_REPOSITORY
-        GitHub repository used to pull, push images when cache is used.
+        GitHub repository used to pull and push images.
         Default: ${_breeze_default_github_repository:=}.
 
-        If you use this flag, automatically --use-github-registry flag is enabled.
-
 "
 
     [[ "$show_sha" != "no_show_sha" ]] && echo "
@@ -2827,8 +2743,6 @@ function breeze::flag_pull_push_docker_images() {
         automatically pull and use that image so that you can easily reproduce a problem
         that occurred in CI.
 
-        If you use this flag, automatically --use-github-registry is enabled.
-
         Default: ${_breeze_default_github_image_id:=}.
 
 "
@@ -3051,8 +2965,6 @@ function breeze::print_star_line() {
 #   EXECUTOR
 #   POSTGRES_VERSION
 #   MYSQL_VERSION
-#   DOCKERHUB_USER
-#   DOCKERHUB_REPO
 #
 #######################################################################################################
 
@@ -3102,8 +3014,6 @@ function breeze::read_saved_environment_variables() {
 #     EXECUTOR
 #     POSTGRES_VERSION
 #     MYSQL_VERSION
-#     DOCKERHUB_USER
-#     DOCKERHUB_REPO
 #
 # Updated Global constants:
 #      BRANCH_NAME
@@ -3270,16 +3180,12 @@ function breeze::make_sure_precommit_is_installed() {
 function breeze::remove_images() {
     # shellcheck disable=SC2086
     docker rmi --force ${PYTHON_BASE_IMAGE} \
-                       ${GITHUB_REGISTRY_PYTHON_BASE_IMAGE} \
                        ${AIRFLOW_PYTHON_BASE_IMAGE} \
-                       ${AIRFLOW_CI_IMAGE} \
-                       ${DEFAULT_CI_IMAGE} \
                        ${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE} \
-                       ${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE} \
+                       ${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE} \
+                       ${AIRFLOW_CI_IMAGE} \
                        ${AIRFLOW_PROD_IMAGE} \
-                       ${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE} \
                        ${AIRFLOW_PROD_BUILD_IMAGE} \
-                       ${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE} \
         2>/dev/null >/dev/null && true
     echo
     echo "###################################################################"
@@ -3484,7 +3390,7 @@ function breeze::run_breeze_command() {
         docker_engine_resources::check_all_resources
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
-            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"  || true
+            ${run_command} "${SCRIPTS_CI_DIR}/tools/fix_ownership.sh"  || true
         else
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
         fi
@@ -3541,9 +3447,9 @@ function breeze::run_breeze_command() {
         ;;
     perform_push_image)
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
-            push_pull_remove_images::push_prod_images
+            push_pull_remove_images::push_prod_images_to_github
         else
-            push_pull_remove_images::push_ci_images
+            push_pull_remove_images::push_ci_images_to_github
         fi
         ;;
     perform_initialize_local_virtualenv)
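
The dc_ci wrapper generated by breeze::prepare_command_file above is a small shell script that
carries the assembled COMPOSE_FILE and image name and forwards its arguments to docker-compose.
A minimal sketch of its shape, assuming example values for the substituted variables and an
assumed final docker-compose call (the diff elides the command template):

    #!/usr/bin/env bash
    # Illustrative sketch only - the real contents are generated by breeze::prepare_command_file
    cd "$( dirname "${BASH_SOURCE[0]}" )" || exit
    export HOST_USER_ID=1000                        # assumed example value
    export HOST_GROUP_ID=1000                       # assumed example value
    export COMPOSE_FILE="base.yml:backend-sqlite.yml:files.yml"   # assembled compose_ci_file
    export AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow-main-python3.8-ci-v2"
    docker-compose "${@}"                           # assumed final invocation
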
diff --git a/breeze-complete b/breeze-complete
index 58af21b..db9e42a 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -145,8 +145,6 @@ yesqa
 EOF
 )
 
-_breeze_default_dockerhub_user="apache"
-_breeze_default_dockerhub_repo="airflow-ci"
 _breeze_default_github_repository="apache/airflow"
 _breeze_default_github_image_id="latest"
 
@@ -157,7 +155,7 @@ l a: t: d:
 v y n q f
 F P I E: C r
 L U X
-D: R: c g: s:
+g: s:
 S: N:
 "
 
@@ -166,9 +164,9 @@ help python: backend: integration:
 kubernetes-mode: kubernetes-version: helm-version: kind-version:
 skip-mounting-local-sources mount-all-local-sources install-airflow-version: install-airflow-reference: db-reset
 verbose assume-yes assume-no assume-quit forward-credentials init-script:
-force-build-images force-pull-images force-pull-base-python-image production-image extras: force-clean-images skip-rebuild-check
+force-build-images force-pull-images check-if-base-python-image-updated production-image extras: force-clean-images skip-rebuild-check
 build-cache-local build-cache-pulled build-cache-disabled disable-pip-cache
-dockerhub-user: dockerhub-repo: use-github-registry github-registry: github-repository: github-image-id: generate-constraints-mode:
+github-repository: github-image-id: generate-constraints-mode:
 postgres-version: mysql-version:
 version-suffix-for-pypi: version-suffix-for-svn:
 additional-extras: additional-python-deps: additional-dev-deps: additional-runtime-deps: image-tag:
@@ -281,12 +279,6 @@ function breeze_complete::get_known_values_breeze() {
     --mysql-version)
         _breeze_known_values=${_breeze_allowed_mysql_versions}
         ;;
-    -D | --dockerhub-user)
-        _breeze_known_values="${_breeze_default_dockerhub_user}"
-        ;;
-    -R | --dockerhub-repo)
-        _breeze_known_values="${_breeze_default_dockerhub_repo}"
-        ;;
     -g | --github-repository)
         _breeze_known_values="${_breeze_default_github_repository}"
         ;;
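
With the DockerHub flags removed from breeze-complete, --github-repository and --github-image-id
are the remaining registry-related switches. Two illustrative invocations, assuming the script is
called as ./breeze and reusing the commit SHA already shown in the help text:

    # Use images from a specific repository (a fork or the canonical apache/airflow)
    ./breeze shell --github-repository apache/airflow

    # Use the CI image built for a specific commit
    ./breeze shell --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e
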
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index 72988a9..6068508 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -168,10 +168,7 @@ Run script to re-tag images from the ``main`` branch to the  ``vX-Y-test`` branc
 ## Prepare PyPI convenience "snapshot" packages
 
 At this point we have the artefact that we vote on, but as a convenience to developers we also want to
-publish "snapshots" of the RC builds to PyPI for installing via pip. Also those packages
-are used to build the production docker image in DockerHub, so we need to upload the packages
-before we push the tag to GitHub. Pushing the tag to GitHub automatically triggers image building in
-DockerHub.
+publish "snapshots" of the RC builds to PyPI for installing via pip.
 
 To do this we need to
 
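
For context, once such a snapshot is published, installing it is an ordinary pip install of the
RC version; the version below is a hypothetical example only:

    # hypothetical RC version, shown purely for illustration
    pip install apache-airflow==2.1.3rc1
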
diff --git a/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md b/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md
index b42ccf3..a144389 100644
--- a/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md
+++ b/dev/README_RELEASE_AIRFLOW_UPGRADE_CHECK.md
@@ -153,10 +153,7 @@ official Apache releases must not include the rcN suffix.
 ## Prepare PyPI convenience "snapshot" packages
 
 At this point we have the artefact that we vote on, but as a convenience to developers we also want to
-publish "snapshots" of the RC builds to pypi for installing via pip. Also those packages
-are used to build the production docker image in DockerHub, so we need to upload the packages
-before we push the tag to GitHub. Pushing the tag to GitHub automatically triggers image building in
-DockerHub.
+publish "snapshots" of the RC builds to PyPI for installing via pip.
 
 To do this we need to
 
diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py
index c84f464..5eeda8e 100755
--- a/dev/retag_docker_images.py
+++ b/dev/retag_docker_images.py
@@ -26,22 +26,6 @@
 # * when starting new release branch (for example `v2-1-test`)
 # * when renaming a branch
 #
-# Docker registries we are using:
-#
-# * DockerHub - we keep `apache/airflow-ci` image with distinct tags
-#   that determine type of the image, because in DockerHub we only
-#   have access to `apache/airflow-ci` and `apache/airflow-ci` image
-#   and we want to keep the CI images and prod images separately.
-#
-# * GitHub Docker Registries: (depends on the type of registry) we have
-#   more flexibility:
-#   * In the old GitHub docker registry - docker.pkg.github.com -
-#     (current but already deprecated) we can use
-#     "apache/airflow/IMAGE:tag" i
-#   * in the new package registry (ghcr.io) - we can submitg anything
-#     under apache/airflow-* but then we link it to the
-#     project via docker image label.
-#
 import subprocess
 from typing import List
 
@@ -49,26 +33,10 @@ import click
 
 PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
 
-DOCKERHUB_IMAGES = [
-    "{prefix}:python{python_version}-{branch}",
-    "{prefix}:{branch}-python{python_version}-ci",
-    "{prefix}:{branch}-python{python_version}-ci-manifest",
-    "{prefix}:{branch}-python{python_version}",
-    "{prefix}:{branch}-python{python_version}-build",
-]
-
-GITHUB_DOCKER_REGISTRY_PREFIX = "docker.pkg.github.com/apache/airflow"
-
-GITHUB_REGISTRY_IMAGES = [
-    "{prefix}/{branch}-python{python_version}-ci-v2:latest",
-    "{prefix}/{branch}-python{python_version}-v2:latest",
-    "{prefix}/{branch}-python{python_version}-build-v2:latest",
-]
-
-
 GHCR_IO_PREFIX = "ghcr.io/apache/airflow"
 
 GHCR_IO_IMAGES = [
+    "{prefix}-{branch}-python{python_version}-ci-v2-manifest:latest",
     "{prefix}-{branch}-python{python_version}-ci-v2:latest",
     "{prefix}-{branch}-python{python_version}-v2:latest",
     "{prefix}-{branch}-python{python_version}-build-v2:latest",
@@ -94,40 +62,13 @@ def pull_push_all_images(
 
 
 @click.group(invoke_without_command=True)
-@click.option(
-    "--source-dockerhub", type=str, default="apache/airflow-ci", help="Source repo [apache/airflow-ci]"
-)
-@click.option(
-    "--target-dockerhub", type=str, default="apache/airflow-ci", help="Target repo [apache/airflow-ci]"
-)
-@click.option("--source-branch", type=str, default="main", help="Source branch name [main")
+@click.option("--source-branch", type=str, default="main", help="Source branch name [main]")
 @click.option("--target-branch", type=str, default="main", help="Target branch name [main]")
-@click.option("--dockerhub/--no-dockerhub", default=True, help="Whether to synchronize DockerHub")
-@click.option("--registry/--no-registry", default=True, help="Whether to synchronize GitHub registry")
-@click.option("--ghcr-io/--no-ghcr-io", default=True, help="Whether to synchronize ghcr.io")
 def main(
-    source_dockerhub: str,
-    target_dockerhub: str,
     source_branch: str,
     target_branch: str,
-    dockerhub: bool,
-    registry: bool,
-    ghcr_io: bool,
 ):
-    if dockerhub:
-        pull_push_all_images(
-            source_dockerhub, target_dockerhub, DOCKERHUB_IMAGES, source_branch, target_branch
-        )
-    if registry:
-        pull_push_all_images(
-            GITHUB_DOCKER_REGISTRY_PREFIX,
-            GITHUB_DOCKER_REGISTRY_PREFIX,
-            GITHUB_REGISTRY_IMAGES,
-            source_branch,
-            target_branch,
-        )
-    if ghcr_io:
-        pull_push_all_images(GHCR_IO_PREFIX, GHCR_IO_PREFIX, GHCR_IO_IMAGES, source_branch, target_branch)
+    pull_push_all_images(GHCR_IO_PREFIX, GHCR_IO_PREFIX, GHCR_IO_IMAGES, source_branch, target_branch)
 
 
 if __name__ == "__main__":
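
With the DockerHub and legacy docker.pkg.github.com targets removed, the retag script only needs
the branch names. A typical invocation when starting a new release branch (branch names are
examples):

    # Re-tag all ghcr.io images built from main onto the new test branch
    python3 dev/retag_docker_images.py --source-branch main --target-branch v2-1-test
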
diff --git a/images/CI.png b/images/CI.png
deleted file mode 100644
index 7123519..0000000
Binary files a/images/CI.png and /dev/null differ
diff --git a/images/ci/CI.png b/images/ci/CI.png
deleted file mode 100644
index 7123519..0000000
Binary files a/images/ci/CI.png and /dev/null differ
diff --git a/images/ci/pull_request_ci_flow.md5 b/images/ci/pull_request_ci_flow.md5
index d1ff43a..c5ef0a4 100644
--- a/images/ci/pull_request_ci_flow.md5
+++ b/images/ci/pull_request_ci_flow.md5
@@ -1 +1 @@
-7a3a1def02205e15a6c91fb2a5605edf  images/ci/pull_request_ci_flow.mermaid
+1c7aa3a8dd16d79c548c52d0390b6152  images/ci/pull_request_ci_flow.mermaid
diff --git a/images/ci/pull_request_ci_flow.mermaid b/images/ci/pull_request_ci_flow.mermaid
index f97e1cd..a098158 100644
--- a/images/ci/pull_request_ci_flow.mermaid
+++ b/images/ci/pull_request_ci_flow.mermaid
@@ -16,77 +16,101 @@
 %% under the License.
 
 sequenceDiagram
-    Fork -->> Airflow Repo: Start Pull Request
+    Note over Airflow Repo: pull request
     Note over CI Build: pull_request<br>[Read Token]
     Note over Build Images: pull_request_target<br>[Write Token]
     activate Airflow Repo
-    Airflow Repo -->> CI Build: Trigger 'pull_request' event
+    Airflow Repo -->> CI Build: Trigger 'pull_request'
     activate CI Build
+    CI Build -->> Build Images: Trigger 'pull_request_target'
     activate Build Images
-    Airflow Repo -->> Build Images: Trigger 'pull_request_target' event
-    opt Failed builds
-        Note right of Build Images: Find failed<br>[CI Build]
-        Build Images -->> CI Build: Cancel
-    end
     Note over Build Images: Build info
-    par 3.6, 3.7, 3.8
-        Note over Build Images: Build PROD Images<br>[COMMIT_SHA]
-    and
+    par 3.6, [3.7, 3.8, 3.9]
+        activate GitHub Registry
+        GitHub Registry ->> Build Images: Pull CI Images from Cache
+        deactivate GitHub Registry
         Note over Build Images: Build CI Images<br>[COMMIT_SHA]
     end
     par No CI image
-        Note over CI Build: Build info
+        Note over CI Build: Build info<br>Which tests?<br>Which Python?
     and
-        Note over CI Build: Should trigger tests?
+        Note over CI Build: OpenAPI client gen
     and
-        Note over CI Build: Helm tests
+        Note over CI Build: Test UI
     and
-        Note over CI Build: OpenAPI client gen
+        Note over CI Build: Test examples<br>PROD image building
     end
-    par 3.6, 3.7, 3.8
-        Build Images ->> GitHub Registry: Push CI Images
+    par 3.6, [3.7, 3.8, 3.9]
         activate GitHub Registry
-    and
-        Build Images ->> GitHub Registry: Push PROD Images
+        Build Images ->> GitHub Registry: Push CI Images
+        Note over GitHub Registry: Tagged CI Images<br>[COMMIT_SHA]
     end
-    opt If any step failed
-        Build Images -->> CI Build: Cancel associated run
+    par 3.6, [3.7, 3.8, 3.9]
+        GitHub Registry ->> Build Images: Pull PROD Images from Cache
+        Note over Build Images: Build PROD Images<br>[COMMIT_SHA]
     end
-    deactivate Build Images
-    Note over GitHub Registry: Tagged Images<br>[COMMIT_SHA]
-    loop Wait for images
-        par 3.6, 3.7, 3.8
-            CI Build ->> CI Build: Pull CI Images
-            Note over CI Build: Wait for<br>[COMMIT_SHA]
-        and
-            CI Build ->> CI Build: Pull PROD Images
+    loop Wait for CI images
+        par 3.6, [3.7, 3.8, 3.9]
+            CI Build ->> CI Build: Check CI Images
             Note over CI Build: Wait for<br>[COMMIT_SHA]
         end
     end
-    par 3.6, 3.7, 3.8
-        GitHub Registry ->> CI Build: Pull CI Images
-    and
-        GitHub Registry ->> CI Build: Pull PROD Images
+    par 3.6, [3.7, 3.8, 3.9]
+        GitHub Registry ->> CI Build: Pull CI Image
+        Note over CI Build: Verify CI Image
     end
     deactivate GitHub Registry
-    par 3.6, 3.7, 3.8
-        Note over CI Build: Run static checks
+    par 3.6, [3.7, 3.8, 3.9]
+        opt Needed?
+            Note over CI Build: Run static checks
+        end
     and
-        Note over CI Build: Build docs
+        opt Needed?
+            Note over CI Build: Run basic <br>static checks
+        end
     and
-        Note over CI Build: Spell check docs
+        opt Needed?
+            Note over CI Build: Build docs
+        end
     and
-        opt Triggered?
+        opt Needed?
             Note over CI Build: Tests
         end
     and
-        opt Triggered?
-            Note over CI Build: Kubernetes  Tests
+        opt Needed?
+            Note over CI Build: Test provider <br>packages build
+        end
+    and
+        opt Needed?
+            Note over CI Build: Helm tests
+        end
+    end
+    par 3.6, [3.7, 3.8, 3.9]
+        Build Images ->> GitHub Registry: Push PROD Images
+        activate GitHub Registry
+    end
+    deactivate Build Images
+    Note over GitHub Registry: Tagged PROD Images<br>[COMMIT_SHA]
+    loop Wait for PROD images
+        par 3.6, [3.7, 3.8, 3.9]
+            CI Build ->> CI Build: Check PROD Images
+            Note over CI Build: Wait for<br>[COMMIT_SHA]
+        end
+    end
+    par 3.6, [3.7, 3.8, 3.9]
+        GitHub Registry ->> CI Build: Pull PROD Image
+        Note over CI Build: Verify PROD Image
+    end
+    deactivate GitHub Registry
+    par 3.6, [3.7, 3.8, 3.9]
+        opt Needed?
+            Note over CI Build: Run Kubernetes <br>tests
+        end
+    and
+        opt Needed?
+            Note over CI Build: Run Kubernetes <br>upgrade tests
         end
     end
-    Note over CI Build: Quarantined tests
-    Note over CI Build: Merge Coverage
-    CI Build -->> Coverage.io: Upload Coverage
-    CI Build -->> Airflow Repo: Status Check for CI Build
+    CI Build -->> Airflow Repo: Status update
     deactivate Airflow Repo
     deactivate CI Build
diff --git a/images/ci/pull_request_ci_flow.png b/images/ci/pull_request_ci_flow.png
index 12f75d9..f871dc6 100644
Binary files a/images/ci/pull_request_ci_flow.png and b/images/ci/pull_request_ci_flow.png differ
diff --git a/images/ci/push_ci_flow.md5 b/images/ci/push_ci_flow.md5
index 40f0cb0..28f9e2a 100644
--- a/images/ci/push_ci_flow.md5
+++ b/images/ci/push_ci_flow.md5
@@ -1 +1 @@
-b879f9a8009677fc5248d849e15e4a4a  images/ci/push_ci_flow.mermaid
+b2c2b45380fb4510c7b3325cdb07c2f2  images/ci/push_ci_flow.mermaid
diff --git a/images/ci/push_ci_flow.mermaid b/images/ci/push_ci_flow.mermaid
index afb5389..aab67dc 100644
--- a/images/ci/push_ci_flow.mermaid
+++ b/images/ci/push_ci_flow.mermaid
@@ -16,85 +16,97 @@
 %% under the License.
 
 sequenceDiagram
-    Committer -->> Airflow Repo: Merge PR
+    Note over Airflow Repo: merge
+    Note over CI Build: push<br>[Write Token]
+    Note over Build Images: push<br>[Write Token]
     activate Airflow Repo
-    Airflow Repo -->> CI Build: Trigger 'schedled' event
+    Airflow Repo -->> CI Build: Trigger 'push'
     activate CI Build
+    Airflow Repo -->> Build Images: Trigger 'push'
     activate Build Images
-    Airflow Repo -->> Build Images: Trigger 'schedled' event
-    opt Failed builds
-        Note right of Build Images: Find failed<br>[CI Build]
-        Build Images -->> CI Build: Cancel
-    end
     Note over Build Images: Build info
-    par 3.6, 3.7, 3.8
-        Note over Build Images: Update constraints
-        Note over Build Images: Build PROD Images<br>[COMMIT_SHA]
-    and
-        Note over Build Images: Update constraints
+    par 3.6, 3.7, 3.8, 3.9
+        activate GitHub Registry
+        GitHub Registry ->> Build Images: Pull CI Images from Cache
+        deactivate GitHub Registry
         Note over Build Images: Build CI Images<br>[COMMIT_SHA]
     end
     par No CI image
-        Note over CI Build: Build info
+        Note over CI Build: Build info<br>All tests<br>All python
     and
-        Note over CI Build: Test always!
+        Note over CI Build: OpenAPI client gen
     and
-        Note over CI Build: Helm tests
+        Note over CI Build: Test UI
     and
-        Note over CI Build: OpenAPI client gen
+        Note over CI Build: Test examples<br>PROD image building
     end
-    par 3.6, 3.7, 3.8
+    par 3.6, 3.7, 3.8, 3.9
         Build Images ->> GitHub Registry: Push CI Images
         activate GitHub Registry
-    and
-        Build Images ->> GitHub Registry: Push PROD Images
+        Note over GitHub Registry: Tagged CI Images<br>[COMMIT_SHA]
     end
-    opt If any step failed
-        Build Images -->> CI Build: Cancel associated run
+    par 3.6, 3.7, 3.8, 3.9
+        GitHub Registry ->> Build Images: Pull PROD Images from Cache
+        Note over Build Images: Build PROD Images<br>[COMMIT_SHA]
     end
-    deactivate Build Images
-    Note over GitHub Registry: Tagged Images<br>[COMMIT_SHA]
-    loop Wait for images
-        par 3.6, 3.7, 3.8
-            CI Build ->> CI Build: Pull CI Images
-            Note over CI Build: Wait for<br>[COMMIT_SHA]
-        and
-            CI Build ->> CI Build: Pull PROD Images
+    loop Wait for CI images
+        par 3.6, 3.7, 3.8, 3.9
+            CI Build ->> CI Build: Check CI Images
             Note over CI Build: Wait for<br>[COMMIT_SHA]
         end
     end
-    par 3.6, 3.7, 3.8
-        GitHub Registry ->> CI Build: Pull CI Images
-    and
-        GitHub Registry ->> CI Build: Pull PROD Images
+    par 3.6, 3.7, 3.8, 3.9
+        GitHub Registry ->> CI Build: Pull CI Image [COMMIT_SHA]
+        Note over CI Build: Verify CI Image
     end
     deactivate GitHub Registry
-    par 3.6, 3.7, 3.8
+    par 3.6, 3.7, 3.8, 3.9
         Note over CI Build: Run static checks
     and
         Note over CI Build: Build docs
     and
-        Note over CI Build: Spell check docs
-    and
         Note over CI Build: Tests
     and
-        Note over CI Build: Kubernetes  Tests
+        Note over CI Build: Test provider <br>packages build
+    and
+        Note over CI Build: Helm tests
+    end
+    par 3.6, 3.7, 3.8, 3.9
+        Build Images ->> GitHub Registry: Push PROD Images
+        Note over GitHub Registry: Tagged PROD Images<br>[COMMIT_SHA]
+        activate GitHub Registry
+    end
+    deactivate Build Images
+    loop Wait for PROD images
+        par 3.6, 3.7, 3.8, 3.9
+            CI Build ->> CI Build: Check PROD Images
+            Note over CI Build: Wait for<br>[COMMIT_SHA]
+        end
+    end
+    par 3.6, 3.7, 3.8, 3.9
+        GitHub Registry ->> CI Build: Pull PROD Image [COMMIT_SHA]
+        Note over CI Build: Verify PROD Image
+    end
+    deactivate GitHub Registry
+    par 3.6, 3.7, 3.8, 3.9
+        Note over CI Build: Run Kubernetes <br>tests
+    and
+        Note over CI Build: Run Kubernetes <br>upgrade tests
     end
-    Note over CI Build: Quarantined tests
     Note over CI Build: Merge Coverage
     CI Build -->> Coverage.io: Upload Coverage
-    Note over CI Build: Tag image<br>[COMMIT_SHA]
-    par 3.6, 3.7, 3.8
-        CI Build ->> GitHub Registry: Push CI Images
+    par 3.6, 3.7, 3.8, 3.9
+        CI Build ->> GitHub Registry: Push CI Images to Cache
         activate GitHub Registry
     and
-        CI Build ->> GitHub Registry: Push PROD Images
-    end
-    Note over GitHub Registry: Tagged Images<br>[COMMIT_SHA]
-    par 3.6, 3.7, 3.8
-        CI Build ->> Airflow Repo: Push Constraints
+        CI Build ->> GitHub Registry: Push PROD Images to Cache
     end
+    Note over GitHub Registry: Tagged Images<br>[latest]
     deactivate GitHub Registry
-    CI Build -->> Airflow Repo: Status Check for CI Build
+    par 3.6, 3.7, 3.8, 3.9
+        Note over CI Build: Generate constraints
+        CI Build ->> Airflow Repo: Push constraints
+    end
+    CI Build -->> Airflow Repo: Status update
     deactivate Airflow Repo
     deactivate CI Build
diff --git a/images/ci/push_ci_flow.png b/images/ci/push_ci_flow.png
index a388b23..27a67d4 100644
Binary files a/images/ci/push_ci_flow.png and b/images/ci/push_ci_flow.png differ
diff --git a/images/ci/scheduled_ci_flow.md5 b/images/ci/scheduled_ci_flow.md5
index 0de07aa..8b54226 100644
--- a/images/ci/scheduled_ci_flow.md5
+++ b/images/ci/scheduled_ci_flow.md5
@@ -1 +1 @@
-5e470a0b524d58aa8e8946b570719c0d  images/ci/scheduled_ci_flow.mermaid
+d6379e426c5b3027fc10b7f83468b0d8  images/ci/scheduled_ci_flow.mermaid
diff --git a/images/ci/scheduled_ci_flow.mermaid b/images/ci/scheduled_ci_flow.mermaid
index f982a8f..13045af 100644
--- a/images/ci/scheduled_ci_flow.mermaid
+++ b/images/ci/scheduled_ci_flow.mermaid
@@ -16,94 +16,83 @@
 %% under the License.
 
 sequenceDiagram
-    CRON job -->> Airflow Repo: Nightly run
+    Note over Airflow Repo: scheduled
+    Note over CI Build: schedule<br>[Write Token]
+    Note over Build Images: schedule<br>[Write Token]
     activate Airflow Repo
-    Note over Airflow Repo: Trigger PR build
-    Note over CI Build: push<br>[Write Token]
-    Note over Build Images: workflow_run<br>[Write Token]
-    Airflow Repo -->> CI Build: Trigger 'pull_request' event
+    Airflow Repo -->> CI Build: Trigger 'schedule'
     activate CI Build
-    CI Build -->> Build Images: Trigger 'workflow_run' event
+    Airflow Repo -->> Build Images: Trigger 'schedule'
     activate Build Images
-    Note over Build Images: Find Duplicate<br>[CI Build]
-    opt Duplicated builds
-        Build Images -->> CI Build: Cancel
-    end
-    Note over Build Images: Find Duplicates<br>[Build Image]
-    opt Duplicated builds
-        Build Images -->> Build Images: Cancel
-    end
-    Note over Build Images: Find failed<br>[CI Build]
-    opt Failed builds
-        Build Images -->> CI Build: Cancel
-    end
-    Note over Build Images: Find failed<br>[Build Image]
-    opt Failed builds
-        Build Images -->> Build Images: Cancel
-    end
     Note over Build Images: Build info
-    par 3.6, 3.7, 3.8
-        Note over Build Images: Build from scratch
-        Note over Build Images: Update constraints
-        Note over Build Images: Build PROD Images<br>[GITHUB_RUN_ID]
-    and
-        Note over Build Images: Build from scratch
-        Note over Build Images: Update constraints
-        Note over Build Images: Build CI Images<br>[GITHUB_RUN_ID]
+    par 3.6, 3.7, 3.8, 3.9
+        Note over Build Images: Build CI Images<br>Cache disabled<br>[COMMIT_SHA]
     end
     par No CI image
-        Note over CI Build: Build info
+        Note over CI Build: Build info<br>All tests<br>All python
     and
-        Note over CI Build: Test always!
+        Note over CI Build: OpenAPI client gen
     and
-        Note over CI Build: Helm tests
+        Note over CI Build: Test UI
     and
-        Note over CI Build: OpenAPI client gen
+        Note over CI Build: Test examples<br>PROD image building
     end
-    par 3.6, 3.7, 3.8
+    par 3.6, 3.7, 3.8, 3.9
         Build Images ->> GitHub Registry: Push CI Images
         activate GitHub Registry
-    and
-        Build Images ->> GitHub Registry: Push PROD Images
+        Note over GitHub Registry: Tagged CI Images<br>[COMMIT_SHA]
     end
-    opt If any step failed
-        Build Images -->> CI Build: Cancel triggering run
+    par 3.6, 3.7, 3.8, 3.9
+        Note over Build Images: Build PROD Images<br>Cache disabled<br>[COMMIT_SHA]
     end
-    deactivate Build Images
-    Note over GitHub Registry: Tagged Images<br>[GITHUB_RUN_ID]
-    loop Wait for images
-        par 3.6, 3.7, 3.8
-            CI Build ->> CI Build: Pull CI Images
-            Note over CI Build: Wait for<br>[GITHUB_RUN_ID]
-        and
-            CI Build ->> CI Build: Pull PROD Images
-            Note over CI Build: Wait for<br>[GITHUB_RUN_ID]
+    loop Wait for CI images
+        par 3.6, 3.7, 3.8, 3.9
+            CI Build ->> CI Build: Check CI Images
+            Note over CI Build: Wait for<br>[COMMIT_SHA]
         end
     end
-    par 3.6, 3.7, 3.8
-        GitHub Registry ->> CI Build: Pull CI Images
-    and
-        GitHub Registry ->> CI Build: Pull PROD Images
+    par 3.6, 3.7, 3.8, 3.9
+        GitHub Registry ->> CI Build: Pull CI Image [COMMIT_SHA]
+        Note over CI Build: Verify CI Image
     end
     deactivate GitHub Registry
-    par 3.6, 3.7, 3.8
+    par 3.6, 3.7, 3.8, 3.9
         Note over CI Build: Run static checks
     and
         Note over CI Build: Build docs
     and
-        Note over CI Build: Spell check docs
-    and
         Note over CI Build: Tests
     and
-        Note over CI Build: Kubernetes  Tests
+        Note over CI Build: Test provider <br>packages build
+    and
+        Note over CI Build: Helm tests
+    end
+    par 3.6, 3.7, 3.8, 3.9
+        Build Images ->> GitHub Registry: Push PROD Images
+        activate GitHub Registry
+        Note over GitHub Registry: Tagged PROD Images<br>[COMMIT_SHA]
+    end
+    deactivate Build Images
+    loop Wait for PROD images
+        par 3.6, 3.7, 3.8, 3.9
+            CI Build ->> CI Build: Check PROD Images
+            Note over CI Build: Wait for<br>[COMMIT_SHA]
+        end
+    end
+    par 3.6, 3.7, 3.8, 3.9
+        GitHub Registry ->> CI Build: Pull PROD Image [COMMIT_SHA]
+        Note over CI Build: Verify PROD Image
+    end
+    deactivate GitHub Registry
+    par 3.6, 3.7, 3.8, 3.9
+        Note over CI Build: Run Kubernetes <br>tests
+    and
+        Note over CI Build: Run Kubernetes <br>upgrade tests
     end
-    Note over CI Build: Quarantined tests
-    Note over CI Build: Merge Coverage
-    CI Build -->> Coverage.io: Upload Coverage
-    par 3.6, 3.7, 3.8
-        CI Build ->> Airflow Repo: Push Constraints
+    par 3.6, 3.7, 3.8, 3.9
+        Note over CI Build: Generate constraints
+        CI Build ->> Airflow Repo: Push constraints
     end
-    CI Build ->> Airflow Repo: Push Nightly Tag
-    CI Build -->> Airflow Repo: Status Check for CI Build
+    CI Build -->> Airflow Repo: Status update
     deactivate Airflow Repo
     deactivate CI Build
diff --git a/images/ci/scheduled_ci_flow.png b/images/ci/scheduled_ci_flow.png
index e9bf21c..9e0be5b 100644
Binary files a/images/ci/scheduled_ci_flow.png and b/images/ci/scheduled_ci_flow.png differ
diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index 6b1cb4e..34cc100 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -18,7 +18,7 @@
 version: "2.2"
 services:
   airflow:
-    image: ${AIRFLOW_IMAGE}
+    image: ${AIRFLOW_CI_IMAGE}
     environment:
       - USER=root
       - ADDITIONAL_PATH=~/.local/bin
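
A quick way to sanity-check the change above is to render the compose file with the variable
exported; a minimal sketch, assuming it is run from scripts/ci/docker-compose and using an
example image name:

    # Verify that the airflow service now resolves to the CI image
    export AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow-main-python3.8-ci-v2"
    docker-compose -f base.yml config | grep 'image:'
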
diff --git a/scripts/ci/docker-compose/local-prod.yml b/scripts/ci/docker-compose/local-prod.yml
deleted file mode 100644
index 79476d0..0000000
--- a/scripts/ci/docker-compose/local-prod.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
----
-version: "2.2"
-services:
-  airflow:
-    # We need to mount files an directories individually because some files
-    # such apache_airflow.egg-info should not be mounted from host
-    # we only mount those files that it makes sense to edit while developing
-    # or those that might be useful to see in the host as output of the
-    # tests (such as logs)
-    volumes:
-      - ../../../.bash_aliases:/root/.bash_aliases:cached
-      - ../../../.bash_history:/root/.bash_history:cached
-      - ../../../.github:/opt/airflow/.github:cached
-      - ../../../.inputrc:/root/.inputrc:cached
-      - ../../../.kube:/root/.kube:cached
-      - ../../../scripts/in_container/prod/entrypoint_prod.sh:/entrypoint:cached
-      - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
-      - ../../../setup.py:/opt/airflow/setup.py:cached
-      - ../../../tests:/opt/airflow/tests:cached
-      - ../../../tmp:/tmp:cached
-      - ../../../metastore_browser:/opt/airflow/metastore_browser:cached
diff --git a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
index 415f7a5..a550038 100755
--- a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh
@@ -19,7 +19,7 @@
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
 # Builds or waits for the CI image in the CI environment
-# Depending on "USE_GITHUB_REGISTRY" and "GITHUB_REGISTRY_WAIT_FOR_IMAGE" setting
+# Depending on "GITHUB_REGISTRY_WAIT_FOR_IMAGE" setting
 function build_ci_image_on_ci() {
     build_images::prepare_ci_build
     start_end::group_start "Prepare CI image ${AIRFLOW_CI_IMAGE}"
@@ -27,7 +27,7 @@ function build_ci_image_on_ci() {
     rm -rf "${BUILD_CACHE_DIR}"
     mkdir -pv "${BUILD_CACHE_DIR}"
 
-    if [[ ${USE_GITHUB_REGISTRY} == "true" && ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
+    if [[ ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
         # Pretend that the image was built. We already have an image with the right sources baked in!
         md5sum::calculate_md5sum_for_all_files
 
@@ -40,12 +40,12 @@ function build_ci_image_on_ci() {
         fi
         # first we pull base python image. We will need it to re-push it after main build
         # Becoming the new "latest" image for other builds
-        build_images::wait_for_image_tag "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}" \
-            "${python_tag_suffix}" "${AIRFLOW_PYTHON_BASE_IMAGE}"
+        build_images::wait_for_image_tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \
+            "${python_tag_suffix}"
 
         # And then the actual image
-        build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}" \
-            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_CI_IMAGE}"
+        build_images::wait_for_image_tag "${AIRFLOW_CI_IMAGE}" \
+            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 
         md5sum::update_all_md5_with_group
     else
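
The wait_for_image_tag calls above now take the full image name plus a tag suffix. The sketch
below shows the general idea of such a wait as a simple polling loop around 'docker manifest
inspect' (which is why experimental docker CLI features are enabled elsewhere in this change);
it is an assumed illustration, not the actual library function:

    # Assumed illustration of waiting for an image tag to appear in the registry
    function wait_for_image_sketch() {
        local image_with_tag="${1}"
        while ! docker manifest inspect "${image_with_tag}" >/dev/null 2>&1; do
            echo "Waiting for ${image_with_tag} ..."
            sleep 10
        done
    }
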
diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index fe8b489..dbcb07d 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -28,11 +28,10 @@ export VERBOSE="true"
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
 # Builds or waits for the PROD image in the CI environment
-# Depending on the "USE_GITHUB_REGISTRY" and "GITHUB_REGISTRY_WAIT_FOR_IMAGE" setting
 function build_prod_images_on_ci() {
     build_images::prepare_prod_build
 
-    if [[ ${USE_GITHUB_REGISTRY} == "true" && ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
+    if [[ ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
         # Tries to wait for the images indefinitely
         # skips further image checks - since we already have the target image
 
@@ -44,19 +43,19 @@ function build_prod_images_on_ci() {
         if [[ "${WAIT_FOR_PYTHON_BASE_IMAGE=}" == "true" ]]; then
             # first we pull base python image. We will need it to re-push it after main build
             # Becoming the new "latest" image for other builds
-            build_images::wait_for_image_tag "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}" \
-                "${python_tag_suffix}" "${AIRFLOW_PYTHON_BASE_IMAGE}"
+            build_images::wait_for_image_tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \
+                "${python_tag_suffix}"
         fi
 
         # And then the actual image
-        build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}" \
-            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_PROD_IMAGE}"
+        build_images::wait_for_image_tag "${AIRFLOW_PROD_IMAGE}" \
+            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 
         # And the prod build image
         if [[ "${WAIT_FOR_PROD_BUILD_IMAGE=}" == "true" ]]; then
             # If specified in variable - also waits for the build image
-            build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}" \
-                ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_PROD_BUILD_IMAGE}"
+            build_images::wait_for_image_tag "${AIRFLOW_PROD_BUILD_IMAGE}" \
+                ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
         fi
 
     else
diff --git a/scripts/ci/images/ci_push_ci_images.sh b/scripts/ci/images/ci_push_ci_images.sh
index df2ae17..5bd5eb3 100755
--- a/scripts/ci/images/ci_push_ci_images.sh
+++ b/scripts/ci/images/ci_push_ci_images.sh
@@ -20,4 +20,4 @@
 
 build_images::prepare_ci_build
 
-push_pull_remove_images::push_ci_images
+push_pull_remove_images::push_ci_images_to_github
diff --git a/scripts/ci/images/ci_push_production_images.sh b/scripts/ci/images/ci_push_production_images.sh
index 31216c3..43e14d3 100755
--- a/scripts/ci/images/ci_push_production_images.sh
+++ b/scripts/ci/images/ci_push_production_images.sh
@@ -20,4 +20,4 @@
 
 build_images::prepare_prod_build
 
-push_pull_remove_images::push_prod_images
+push_pull_remove_images::push_prod_images_to_github
diff --git a/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
index 9047665..105bcfb 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
@@ -29,28 +29,24 @@ shift
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
 function pull_ci_image() {
-    local image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+    local image_name_with_tag="${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
     start_end::group_start "Pulling ${image_name_with_tag} image"
-    push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" "${image_name_with_tag}"
+    push_pull_remove_images::pull_image_if_not_present_or_forced "${image_name_with_tag}"
     start_end::group_end
 
 }
 
-push_pull_remove_images::check_if_github_registry_wait_for_image_enabled
-
 start_end::group_start "Configure Docker Registry"
 build_images::configure_docker_registry
 start_end::group_end
 
-export AIRFLOW_CI_IMAGE_NAME="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
+start_end::group_start "Waiting for ${AIRFLOW_CI_IMAGE}"
 
-start_end::group_start "Waiting for ${AIRFLOW_CI_IMAGE_NAME} image to appear"
-push_pull_remove_images::wait_for_github_registry_image \
-    "${AIRFLOW_CI_IMAGE_NAME}${GITHUB_REGISTRY_IMAGE_SUFFIX}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+push_pull_remove_images::wait_for_image "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 build_images::prepare_ci_build
 pull_ci_image
 start_end::group_end
 
 if [[ ${VERIFY_IMAGE=} != "false" ]]; then
-    verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}"
+    verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 fi
diff --git a/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
index 92dbf2b..482b0a5 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
@@ -28,27 +28,22 @@ shift
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
-push_pull_remove_images::check_if_github_registry_wait_for_image_enabled
-
 start_end::group_start "Configure Docker Registry"
 build_images::configure_docker_registry
 start_end::group_end
 
-export AIRFLOW_PROD_IMAGE_NAME="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
-
-start_end::group_start "Waiting for ${AIRFLOW_PROD_IMAGE_NAME} image to appear"
+start_end::group_start "Waiting for ${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 
-push_pull_remove_images::wait_for_github_registry_image \
-    "${AIRFLOW_PROD_IMAGE_NAME}${GITHUB_REGISTRY_IMAGE_SUFFIX}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+push_pull_remove_images::wait_for_image "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 start_end::group_end
 
 start_end::group_start "Pulling the PROD Image"
 build_images::prepare_prod_build
-image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+image_name_with_tag="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 verbosity::print_info "Pulling the ${image_name_with_tag} image and tagging with ${AIRFLOW_PROD_IMAGE}"
-push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" "${image_name_with_tag}"
+push_pull_remove_images::pull_image_if_not_present_or_forced "${image_name_with_tag}"
 start_end::group_end
 
 if [[ ${VERIFY_IMAGE=} != "false" ]]; then
-    verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}"
+    verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
 fi
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 0a7f6bf..ed6af67 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -241,9 +241,8 @@ function build_images::build_ci_image_manifest() {
         --tag="${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" \
         -f- . <<EOF
 FROM scratch
-
 COPY "manifests/local-build-cache-hash" /build-cache-hash
-
+LABEL org.opencontainers.image.source="https://github.com/${GITHUB_REPOSITORY}"
 CMD ""
 EOF
 }
@@ -348,6 +347,17 @@ function build_images::print_build_info() {
     verbosity::print_info
 }
 
+# Retrieves the GitHub Container Registry image prefix from the repository name.
+# GitHub Container Registry stores all images at the organization level; they are only
+# linked to the repository via a docker image label. We therefore assume a convention where
+# the repository name is appended to the organisation name, separated by '-', and everything
+# is converted to lowercase. This is needed because internal PRs from users' or other
+# organisations' repositories may use uppercase names, while a container registry image
+# name has to be lowercase.
+function build_images::get_github_container_registry_image_prefix() {
+    echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]'
+}
+
 function build_images::get_docker_image_names() {
     # python image version to use
     export PYTHON_BASE_IMAGE_VERSION=${PYTHON_BASE_IMAGE_VERSION:=${PYTHON_MAJOR_MINOR_VERSION}}
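
To see how the lowercasing convention and the new names fit together, the sketch below derives
the prefix and composes a CI image name from example values (the repository, branch and '-v2'
suffix are taken from the examples in this diff; the values are illustrative, not computed by
breeze itself here):

    # Illustrative composition of a ghcr.io CI image name
    GITHUB_REPOSITORY="Apache/Airflow"              # may contain uppercase in forks
    GITHUB_REGISTRY="ghcr.io"
    GITHUB_REGISTRY_IMAGE_SUFFIX="-v2"              # suffix as used in the examples above
    prefix="$(echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]')"    # -> apache/airflow
    AIRFLOW_CI_BASE_TAG="main-python3.8-ci"
    echo "${GITHUB_REGISTRY}/${prefix}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
    # -> ghcr.io/apache/airflow-main-python3.8-ci-v2
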
@@ -355,103 +365,73 @@ function build_images::get_docker_image_names() {
     # Python base image to use
     export PYTHON_BASE_IMAGE="python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
 
+    local image_name
+    image_name="${GITHUB_REGISTRY}/$(build_images::get_github_container_registry_image_prefix)"
+
     # CI image base tag
     export AIRFLOW_CI_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
-    # CI image to build
-    export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}"
-    # Default CI image
-    export AIRFLOW_PYTHON_BASE_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:python${PYTHON_MAJOR_MINOR_VERSION}-${BRANCH_NAME}"
-    # CI image to build
-    export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}"
-
-    # Base production image tag - used to build kubernetes tag as well
-    if [[ -z "${FORCE_AIRFLOW_PROD_BASE_TAG=}" ]]; then
-        export AIRFLOW_PROD_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
-    else
-        export AIRFLOW_PROD_BASE_TAG="${FORCE_AIRFLOW_PROD_BASE_TAG}"
-    fi
-
-    # PROD image to build
-    export AIRFLOW_PROD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}"
 
-    # PROD build segment
-    export AIRFLOW_PROD_BUILD_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-build"
+    # Example:
+    #  ghcr.io/apache/airflow-main-python3.8-ci-v2
+    export AIRFLOW_CI_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
 
-    # PROD Kubernetes image to build
-    export AIRFLOW_PROD_IMAGE_KUBERNETES="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_PROD_BASE_TAG}-kubernetes"
+    export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local-airflow-ci-manifest:${AIRFLOW_CI_BASE_TAG}"
 
-    # PROD default image
-    export AIRFLOW_PROD_IMAGE_DEFAULT="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}"
+    # Example:
+    #  ghcr.io/apache/airflow-main-python3.8-ci-v2-manifest
+    export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}-manifest"
 
     # File that is touched when the CI image is built for the first time locally
     export BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"
 
-    local image_name
-    image_name="${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)"
-    local image_separator
-    if [[ ${GITHUB_REGISTRY} == "ghcr.io" ]]; then
-        image_separator="-"
-    elif [[ ${GITHUB_REGISTRY} == "docker.pkg.github.com" ]]; then
-        image_separator="/"
-    else
-        echo
-        echo  "${COLOR_RED}ERROR: Bad value of '${GITHUB_REGISTRY}'. Should be either 'ghcr.io' or 'docker.pkg.github.com'!${COLOR_RESET}"
-        echo
-        exit 1
-    fi
+    # PROD image to build
+    export AIRFLOW_PROD_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}"
 
     # Example:
-    #  docker.pkg.github.com/apache/airflow/main-python3.6-v2
     #  ghcr.io/apache/airflow-v2-1-test-python3.6-v2
-    #  ghcr.io/apache/airflow-python-v2:3.6-slim-buster-<COMMIT_SHA>
-    export GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE="${image_name}${image_separator}${AIRFLOW_PROD_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
+    export AIRFLOW_PROD_IMAGE="${image_name}-${AIRFLOW_PROD_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
+
+    # PROD Kubernetes image to build
+    export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE}-kubernetes"
+
     # Example:
-    #   docker.pkg.github.com/apache/airflow/main-python3.6-build-v2
     #   ghcr.io/apache/airflow-main-python3.6-build-v2
-    export GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE="${image_name}${image_separator}${AIRFLOW_PROD_BASE_TAG}-build${GITHUB_REGISTRY_IMAGE_SUFFIX}"
+    export AIRFLOW_PROD_BUILD_IMAGE="${image_name}-${AIRFLOW_PROD_BASE_TAG}-build${GITHUB_REGISTRY_IMAGE_SUFFIX}"
 
     # Example:
-    #  docker.pkg.github.com/apache/airflow/python-v2:3.6-slim-buster
     #  ghcr.io/apache/airflow-python-v2:3.6-slim-buster
-    #  ghcr.io/apache/airflow-python-v2:3.6-slim-buster-<COMMIT_SHA>
-    export GITHUB_REGISTRY_PYTHON_BASE_IMAGE="${image_name}${image_separator}python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
+    export AIRFLOW_PYTHON_BASE_IMAGE="${image_name}-python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
+
 
-    # Example:
-    #  docker.pkg.github.com/apache/airflow/main-python3.8-ci-v2
-    export GITHUB_REGISTRY_AIRFLOW_CI_IMAGE="${image_name}${image_separator}${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"
 }
 
 # If GitHub Registry is used, login to the registry using GITHUB_USERNAME and
 # GITHUB_TOKEN. In case Personal Access token is not set, skip logging in
 # Also enable experimental features of docker (we need `docker manifest` command)
 function build_images::configure_docker_registry() {
-    if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
-        local token="${GITHUB_TOKEN}"
-        if [[ -z "${token}" ]] ; then
-            verbosity::print_info
-            verbosity::print_info "Skip logging in to GitHub Registry. No Token available!"
-            verbosity::print_info
-        fi
-        if [[ -n "${token}" ]]; then
-            echo "${token}" | docker_v login \
-                --username "${GITHUB_USERNAME:-apache}" \
-                --password-stdin \
-                "${GITHUB_REGISTRY}"
-        else
-            verbosity::print_info "Skip Login to GitHub Registry ${GITHUB_REGISTRY} as token is missing"
-        fi
-        local new_config
-        new_config=$(jq '.experimental = "enabled"' "${HOME}/.docker/config.json")
-        echo "${new_config}" > "${HOME}/.docker/config.json"
+    local token="${GITHUB_TOKEN}"
+    if [[ -z "${token}" ]] ; then
+        verbosity::print_info
+        verbosity::print_info "Skip logging in to GitHub Registry. No Token available!"
+        verbosity::print_info
+    fi
+    if [[ -n "${token}" ]]; then
+        echo "${token}" | docker_v login \
+            --username "${GITHUB_USERNAME:-apache}" \
+            --password-stdin \
+            "${GITHUB_REGISTRY}"
+    else
+        verbosity::print_info "Skip Login to GitHub Registry ${GITHUB_REGISTRY} as token is missing"
     fi
+    local new_config
+    new_config=$(jq '.experimental = "enabled"' "${HOME}/.docker/config.json")
+    echo "${new_config}" > "${HOME}/.docker/config.json"
 }
 
 
 # Prepares all variables needed by the CI build. Depending on the configuration used (python version
 # DockerHub user etc. the variables are set so that other functions can use those variables.
 function build_images::prepare_ci_build() {
-    export AIRFLOW_CI_LOCAL_MANIFEST_IMAGE="local/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}-manifest"
-    export AIRFLOW_CI_REMOTE_MANIFEST_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}-manifest"
     export THE_IMAGE_TYPE="CI"
     export IMAGE_DESCRIPTION="Airflow CI"
 
@@ -459,9 +439,6 @@ function build_images::prepare_ci_build() {
     export AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS:="${DEFAULT_CI_EXTRAS}"}"
     readonly AIRFLOW_EXTRAS
 
-    export AIRFLOW_IMAGE="${AIRFLOW_CI_IMAGE}"
-    readonly AIRFLOW_IMAGE
-
     build_images::configure_docker_registry
     sanity_checks::go_to_airflow_sources
     permissions::fix_group_permissions
@@ -525,7 +502,7 @@ function build_images::rebuild_ci_image_if_needed() {
                 local root_files_count
                 root_files_count=$(find "airflow" "tests" -user root | wc -l | xargs)
                 if [[ ${root_files_count} != "0" ]]; then
-                    ./scripts/ci/tools/ci_fix_ownership.sh || true
+                    ./scripts/ci/tools/fix_ownership.sh || true
                 fi
             fi
             verbosity::print_info
@@ -595,18 +572,6 @@ function build_images::rebuild_ci_image_if_needed_and_confirmed() {
     fi
 }
 
-# Retrieves GitHub Container Registry image prefix from repository name
-# GitHub Container Registry stores all images at the organization level, they are just
-# linked to the repository via docker label - however we assume a convention where we will
-# add repository name to organisation separated by '-' and convert everything to lowercase
-# this is because in order for it to work for internal PR for users or other organisation's
-# repositories, the other organisations and repositories can be uppercase
-# container registry image name has to be lowercase
-function get_github_container_registry_image_prefix() {
-    echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]'
-}
-
-
 # Builds CI image - depending on the caching strategy (pulled, local, disabled) it
 # passes the necessary docker build flags via DOCKER_CACHE_CI_DIRECTIVE array
 # it also passes the right Build args depending on the configuration of the build
@@ -716,10 +681,6 @@ Docker building ${AIRFLOW_CI_IMAGE}.
         --target "main" \
         . -f Dockerfile.ci
     set -u
-    if [[ -n "${DEFAULT_CI_IMAGE=}" ]]; then
-        echo "Tagging additionally image ${AIRFLOW_CI_IMAGE} with ${DEFAULT_CI_IMAGE}"
-        docker_v tag "${AIRFLOW_CI_IMAGE}" "${DEFAULT_CI_IMAGE}"
-    fi
     if [[ -n "${IMAGE_TAG=}" ]]; then
         echo "Tagging additionally image ${AIRFLOW_CI_IMAGE} with ${IMAGE_TAG}"
         docker_v tag "${AIRFLOW_CI_IMAGE}" "${IMAGE_TAG}"
@@ -769,11 +730,6 @@ function build_images::prepare_prod_build() {
             "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}"
         )
     fi
-    if [[ "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" == "${PYTHON_MAJOR_MINOR_VERSION}" ]]; then
-        export DEFAULT_CI_IMAGE="${AIRFLOW_PROD_IMAGE_DEFAULT}"
-    else
-        export DEFAULT_CI_IMAGE=""
-    fi
     export THE_IMAGE_TYPE="PROD"
     export IMAGE_DESCRIPTION="Airflow production"
 
@@ -781,9 +737,6 @@ function build_images::prepare_prod_build() {
     export AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS:="${DEFAULT_PROD_EXTRAS}"}"
     readonly AIRFLOW_EXTRAS
 
-    export AIRFLOW_IMAGE="${AIRFLOW_PROD_IMAGE}"
-    readonly AIRFLOW_IMAGE
-
     build_images::configure_docker_registry
     AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="${BRANCH_NAME}"
     sanity_checks::go_to_airflow_sources
@@ -906,10 +859,6 @@ function build_images::build_prod_images() {
         --target "main" \
         . -f Dockerfile
     set -u
-    if [[ -n "${DEFAULT_PROD_IMAGE:=}" ]]; then
-        echo "Tagging additionally image ${AIRFLOW_PROD_IMAGE} with ${DEFAULT_PROD_IMAGE}"
-        docker_v tag "${AIRFLOW_PROD_IMAGE}" "${DEFAULT_PROD_IMAGE}"
-    fi
     if [[ -n "${IMAGE_TAG=}" ]]; then
         echo "Tagging additionally image ${AIRFLOW_PROD_IMAGE} with ${IMAGE_TAG}"
         docker_v tag "${AIRFLOW_PROD_IMAGE}" "${IMAGE_TAG}"
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 9895d8d..2a38b33 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -167,17 +167,6 @@ function initialization::initialize_branch_variables() {
     export BRANCH_NAME=${BRANCH_NAME:=${DEFAULT_BRANCH}}
 }
 
-# Determine dockerhub user/repo used for push/pull
-function initialization::initialize_dockerhub_variables() {
-    # You can override DOCKERHUB_USER to use your own DockerHub account and play with your
-    # own docker images. In this case you can build images locally and push them
-    export DOCKERHUB_USER=${DOCKERHUB_USER:="apache"}
-
-    # You can override DOCKERHUB_REPO to use your own DockerHub repository and play with your
-    # own docker images. In this case you can build images locally and push them
-    export DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow-ci"}
-}
-
 # Determine available integrations
 function initialization::initialize_available_integrations() {
     export AVAILABLE_INTEGRATIONS="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino"
@@ -276,7 +265,7 @@ function initialization::initialize_force_variables() {
 
     # By default we do not pull python base image. We should do that only when we run upgrade check in
     # CI main and when we manually refresh the images to latest versions
-    export FORCE_PULL_BASE_PYTHON_IMAGE="false"
+    export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="false"
 
     # Determines whether to force build without checking if it is needed
     # Can be overridden by '--force-build-images' flag.
@@ -545,7 +534,6 @@ function initialization::initialize_git_variables() {
 function initialization::initialize_github_variables() {
     # Defaults for interacting with GitHub
     export GITHUB_REGISTRY="ghcr.io"
-    export USE_GITHUB_REGISTRY=${USE_GITHUB_REGISTRY:="false"}
     export GITHUB_REGISTRY_IMAGE_SUFFIX=${GITHUB_REGISTRY_IMAGE_SUFFIX:="-v2"}
     export GITHUB_REGISTRY_WAIT_FOR_IMAGE=${GITHUB_REGISTRY_WAIT_FOR_IMAGE:="false"}
     export GITHUB_REGISTRY_PULL_IMAGE_TAG=${GITHUB_REGISTRY_PULL_IMAGE_TAG:="latest"}
@@ -601,7 +589,6 @@ function initialization::initialize_common_environment() {
     initialization::initialize_branch_variables
     initialization::initialize_available_integrations
     initialization::initialize_files_for_rebuild_check
-    initialization::initialize_dockerhub_variables
     initialization::initialize_mount_variables
     initialization::initialize_force_variables
     initialization::initialize_host_variables
@@ -635,11 +622,6 @@ Basic variables:
     DB_RESET: ${DB_RESET}
     START_AIRFLOW: ${START_AIRFLOW}
 
-DockerHub variables:
-
-    DOCKERHUB_USER=${DOCKERHUB_USER}
-    DOCKERHUB_REPO=${DOCKERHUB_REPO}
-
 Mount variables:
 
     MOUNT_SELECTED_LOCAL_SOURCES: ${MOUNT_SELECTED_LOCAL_SOURCES}
@@ -707,10 +689,8 @@ Production image build variables:
 
 Detected GitHub environment:
 
-    USE_GITHUB_REGISTRY: '${USE_GITHUB_REGISTRY}'
     GITHUB_REPOSITORY: '${GITHUB_REPOSITORY}'
     GITHUB_USERNAME: '${GITHUB_USERNAME}'
-    GITHUB_TOKEN: '${GITHUB_TOKEN}'
     GITHUB_REGISTRY_WAIT_FOR_IMAGE: '${GITHUB_REGISTRY_WAIT_FOR_IMAGE}'
     GITHUB_REGISTRY_PULL_IMAGE_TAG: '${GITHUB_REGISTRY_PULL_IMAGE_TAG}'
     GITHUB_REGISTRY_PUSH_IMAGE_TAG: '${GITHUB_REGISTRY_PUSH_IMAGE_TAG}'
@@ -849,11 +829,8 @@ function initialization::make_constants_read_only() {
     readonly ADDITIONAL_RUNTIME_APT_DEPS
     readonly ADDITIONAL_RUNTIME_APT_ENV
 
-    readonly DOCKERHUB_USER
-    readonly DOCKERHUB_REPO
     readonly DOCKER_CACHE
 
-    readonly USE_GITHUB_REGISTRY
     readonly GITHUB_REGISTRY
     readonly GITHUB_REGISTRY_WAIT_FOR_IMAGE
     readonly GITHUB_REGISTRY_PULL_IMAGE_TAG
@@ -864,7 +841,6 @@ function initialization::make_constants_read_only() {
     readonly GITHUB_USERNAME
 
     readonly FORWARD_CREDENTIALS
-    readonly USE_GITHUB_REGISTRY
 
     readonly EXTRA_STATIC_CHECK_OPTIONS
 
@@ -872,15 +848,9 @@ function initialization::make_constants_read_only() {
 
     readonly PYTHON_BASE_IMAGE_VERSION
     readonly PYTHON_BASE_IMAGE
-    readonly AIRFLOW_PYTHON_BASE_IMAGE
     readonly AIRFLOW_CI_BASE_TAG
-    readonly AIRFLOW_CI_IMAGE
-    readonly AIRFLOW_CI_IMAGE_DEFAULT
     readonly AIRFLOW_PROD_BASE_TAG
-    readonly AIRFLOW_PROD_IMAGE
-    readonly AIRFLOW_PROD_BUILD_IMAGE
     readonly AIRFLOW_PROD_IMAGE_KUBERNETES
-    readonly AIRFLOW_PROD_IMAGE_DEFAULT
     readonly BUILT_CI_IMAGE_FLAG_FILE
     readonly INIT_SCRIPT_FILE
 
diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index 7e06ed3..970a6d3 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -258,7 +258,7 @@ function kind::check_cluster_ready_for_airflow() {
 
 function kind::build_image_for_kubernetes_tests() {
     cd "${AIRFLOW_SOURCES}" || exit 1
-    docker_v build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}" . -f - <<EOF
+    docker_v build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest" . -f - <<EOF
 FROM ${AIRFLOW_PROD_IMAGE}
 
 COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/
@@ -266,11 +266,11 @@ COPY airflow/example_dags/ \${AIRFLOW_HOME}/dags/
 COPY airflow/kubernetes_executor_templates/ \${AIRFLOW_HOME}/pod_templates/
 
 EOF
-    echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES} is prepared for test kubernetes deployment."
+    echo "The ${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest is prepared for test kubernetes deployment."
 }
 
 function kind::load_image_to_kind_cluster() {
-    kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_PROD_IMAGE_KUBERNETES}"
+    kind load docker-image --name "${KIND_CLUSTER_NAME}" "${AIRFLOW_PROD_IMAGE_KUBERNETES}:latest"
 }
 
 MAX_NUM_TRIES_FOR_HEALTH_CHECK=12
@@ -338,10 +338,10 @@ function kind::deploy_airflow_with_helm() {
     helm install airflow . \
         --timeout 10m0s \
         --namespace "${HELM_AIRFLOW_NAMESPACE}" \
-        --set "defaultAirflowRepository=${DOCKERHUB_USER}/${DOCKERHUB_REPO}" \
-        --set "images.airflow.repository=${DOCKERHUB_USER}/${DOCKERHUB_REPO}" \
-        --set "images.airflow.tag=${AIRFLOW_PROD_BASE_TAG}-kubernetes" -v 1 \
-        --set "defaultAirflowTag=${AIRFLOW_PROD_BASE_TAG}-kubernetes" -v 1 \
+        --set "defaultAirflowRepository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
+        --set "images.airflow.repository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
+        --set "images.airflow.tag=latest" -v 1 \
+        --set "defaultAirflowTag=latest" -v 1 \
         --set "config.api.auth_backend=airflow.api.auth.backend.basic_auth" \
         --set "config.logging.logging_level=DEBUG" \
         --set "executor=${EXECUTOR}"
@@ -354,3 +354,31 @@ function kind::deploy_test_kubernetes_resources() {
     kubectl apply -f "scripts/ci/kubernetes/volumes.yaml" --namespace default
     kubectl apply -f "scripts/ci/kubernetes/nodeport.yaml" --namespace airflow
 }
+
+function kind::upgrade_airflow_with_helm() {
+    local mode=$1
+    echo
+    echo "Upgrading airflow with ${mode}"
+    echo
+    local chartdir
+    chartdir=$(mktemp -d)
+    traps::add_trap "rm -rf ${chartdir}" EXIT INT HUP TERM
+    # Copy chart to temporary directory to allow chart deployment in parallel
+    # Otherwise helm deployment will fail on renaming charts to tmpcharts
+    cp -r "${AIRFLOW_SOURCES}/chart" "${chartdir}"
+
+    pushd "${chartdir}/chart" >/dev/null 2>&1 || exit 1
+    helm repo add stable https://charts.helm.sh/stable/
+    helm dep update
+    helm upgrade airflow . --namespace "${HELM_AIRFLOW_NAMESPACE}" \
+        --set "defaultAirflowRepository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
+        --set "images.airflow.repository=${AIRFLOW_PROD_IMAGE_KUBERNETES}" \
+        --set "images.airflow.tag=latest" -v 1 \
+        --set "defaultAirflowTag=latest" -v 1 \
+        --set "config.api.auth_backend=airflow.api.auth.backend.basic_auth" \
+        --set "config.logging.logging_level=DEBUG" \
+        --set "executor=${mode}"
+
+    echo
+    popd > /dev/null 2>&1|| exit 1
+}
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index f8a1d3e..630b5cc 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -84,51 +84,37 @@ ${COLOR_RESET}
     fi
 }
 
-# Pulls image if needed but tries to pull it from GitHub registry before
-# It attempts to pull it from the DockerHub registry. This is used to speed up the builds
-# In GitHub Actions and to pull appropriately tagged builds.
-# Parameters:
-#   $1 -> DockerHub image to pull
-#   $2 -> GitHub image to try to pull first
-function push_pull_remove_images::pull_image_github_dockerhub() {
-    local dockerhub_image="${1}"
-    local github_image="${2}"
-
-    set +e
-    if push_pull_remove_images::pull_image_if_not_present_or_forced "${github_image}"; then
-        # Tag the image to be the DockerHub one
-        docker_v tag "${github_image}" "${dockerhub_image}"
-    else
-        push_pull_remove_images::pull_image_if_not_present_or_forced "${dockerhub_image}"
-    fi
-    set -e
-}
-
-# Rebuilds python base image from the latest available Python version
-function push_pull_remove_images::rebuild_python_base_image() {
-   echo
-   echo "Rebuilding ${AIRFLOW_PYTHON_BASE_IMAGE} from latest ${PYTHON_BASE_IMAGE}"
-   echo
+# Rebuilds python base image from the latest available Python version if it has been updated
+function push_pull_remove_images::check_and_rebuild_python_base_image_if_needed() {
    docker_v pull "${PYTHON_BASE_IMAGE}"
-   echo "FROM ${PYTHON_BASE_IMAGE}" | \
-        docker_v build \
-            --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
-            -t "${AIRFLOW_PYTHON_BASE_IMAGE}" -
+   local dockerhub_python_version
+   dockerhub_python_version=$(docker run "${PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)')
+   local local_python_version
+   local_python_version=$(docker run "${AIRFLOW_PYTHON_BASE_IMAGE}" python -c 'import sys; print(sys.version)')
+   if [[ ${local_python_version} != "${dockerhub_python_version}" ]]; then
+       echo
+       echo "There is a new Python Base image updated!"
+       echo "The version used in Airflow: ${local_python_version}"
+       echo "The version available in DockerHub: ${dockerhub_python_version}"
+       echo "Rebuilding ${AIRFLOW_PYTHON_BASE_IMAGE} from the latest ${PYTHON_BASE_IMAGE}"
+       echo
+       echo "FROM ${PYTHON_BASE_IMAGE}" | \
+            docker_v build \
+                --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
+                -t "${AIRFLOW_PYTHON_BASE_IMAGE}" -
+  fi
 }
 
 # Pulls the base Python image. This image is used as base for CI and PROD images, depending on the parameters used:
 #
-# * if FORCE_PULL_BASE_PYTHON_IMAGE != false, then it rebuild the image using latest Python image available
-#     and adds `org.opencontainers.image.source` label to it, so that it is linked to Airflow
-#     repository when we push it to GHCR registry
-# * Otherwise it pulls the Python base image from either GitHub registry or from DockerHub
-#     depending on USE_GITHUB_REGISTRY variable. In case we pull specific build image (via suffix)
+# * if CHECK_IF_BASE_PYTHON_IMAGE_UPDATED == "true", then it checks if a new Python image has been released
+#     in DockerHub and, if so, rebuilds the base python image and adds the `org.opencontainers.image.source`
+#     label to it, so that it is linked to the Airflow repository when we push it to the
+#     GitHub Container Registry
+# * Otherwise it pulls the Python base image from the GitHub Container Registry.
+#     In case we pull a specific build image (via suffix)
 #     it will pull the right image using the specified suffix
 function push_pull_remove_images::pull_base_python_image() {
-    if [[ ${FORCE_PULL_BASE_PYTHON_IMAGE} == "true" ]] ; then
-        push_pull_remove_images::rebuild_python_base_image
-        return
-    fi
     echo
     echo "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}"
     echo
@@ -136,15 +122,20 @@ function push_pull_remove_images::pull_base_python_image() {
         echo -n "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}
 " > "${DETECTED_TERMINAL}"
     fi
-    if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
-        local python_tag_suffix=""
-        if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then
-            python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+    if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then
+        push_pull_remove_images::pull_image_if_not_present_or_forced \
+            "${AIRFLOW_PYTHON_BASE_IMAGE}${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+        if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]] ; then
+            echo
+            echo  "${COLOR_RED}ERROR: You cannot check for base python image if you pull specific tag: ${GITHUB_REGISTRY_PULL_IMAGE_TAG}.${COLOR_RESET}"
+            echo
+            return 1
         fi
-        push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PYTHON_BASE_IMAGE}" \
-            "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${python_tag_suffix}"
     else
-        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}" || true
+        push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PYTHON_BASE_IMAGE}"
+        if [[ ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]] ; then
+            push_pull_remove_images::check_and_rebuild_python_base_image_if_needed
+        fi
     fi
 }
 
@@ -153,16 +144,12 @@ function push_pull_remove_images::pull_ci_images_if_needed() {
     local python_image_hash
     python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> /dev/null || true)
     if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true" || \
-            ${FORCE_PULL_BASE_PYTHON_IMAGE} == "true" ]]; then
+            ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]]; then
         push_pull_remove_images::pull_base_python_image
     fi
     if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
-        if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
-            push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" \
-                "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        else
-            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}" || true
-        fi
+        push_pull_remove_images::pull_image_if_not_present_or_forced \
+            "${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
     fi
 }
 
@@ -172,37 +159,19 @@ function push_pull_remove_images::pull_prod_images_if_needed() {
     local python_image_hash
     python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> /dev/null || true)
     if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true"  || \
-            ${FORCE_PULL_BASE_PYTHON_IMAGE} == "true" ]]; then
+            ${CHECK_IF_BASE_PYTHON_IMAGE_UPDATED} == "true" ]]; then
         push_pull_remove_images::pull_base_python_image
     fi
     if [[ "${DOCKER_CACHE}" == "pulled" ]]; then
-        if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
-            # "Build" segment of production image
-            push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_BUILD_IMAGE}" \
-                "${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-            # "Main" segment of production image
-            push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" \
-                "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-        else
-            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PROD_BUILD_IMAGE}"
-            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PROD_IMAGE}"
-        fi
+        # "Build" segment of production image
+        push_pull_remove_images::pull_image_if_not_present_or_forced \
+            "${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+        # "Main" segment of production image
+        push_pull_remove_images::pull_image_if_not_present_or_forced \
+            "${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
     fi
 }
 
-# Pushes Ci images and the manifest to the registry in DockerHub.
-function push_pull_remove_images::push_ci_images_to_dockerhub() {
-    push_pull_remove_images::push_image_with_retries "${AIRFLOW_PYTHON_BASE_IMAGE}"
-    push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_IMAGE}"
-    docker_v tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}"
-    push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}"
-    if [[ -n ${DEFAULT_CI_IMAGE=} ]]; then
-        # Only push default image to DockerHub registry if it is defined
-        push_pull_remove_images::push_image_with_retries "${DEFAULT_CI_IMAGE}"
-    fi
-}
-
-
 # Push image to GitHub registry with the push tag:
 #     "${COMMIT_SHA}" - in case of pull-request triggered 'workflow_run' builds
 #     "latest"        - in case of push builds
@@ -215,9 +184,9 @@ function push_pull_remove_images::push_python_image_to_github() {
         python_tag_suffix="-${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
     fi
     docker_v tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \
-        "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${python_tag_suffix}"
+        "${AIRFLOW_PYTHON_BASE_IMAGE}${python_tag_suffix}"
     push_pull_remove_images::push_image_with_retries \
-        "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${python_tag_suffix}"
+        "${AIRFLOW_PYTHON_BASE_IMAGE}${python_tag_suffix}"
 }
 
 # Pushes Ci images and their tags to registry in GitHub
@@ -225,79 +194,46 @@ function push_pull_remove_images::push_ci_images_to_github() {
     if [[ "${PUSH_PYTHON_BASE_IMAGE=}" != "false" ]]; then
         push_pull_remove_images::push_python_image_to_github
     fi
-    local airflow_ci_tagged_image="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    local airflow_ci_tagged_image="${AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
     docker_v tag "${AIRFLOW_CI_IMAGE}" "${airflow_ci_tagged_image}"
     push_pull_remove_images::push_image_with_retries "${airflow_ci_tagged_image}"
+    if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} == "latest" ]]; then
+        local airflow_ci_manifest_tagged_image="${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+        docker_v tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${airflow_ci_manifest_tagged_image}"
+        push_pull_remove_images::push_image_with_retries "${airflow_ci_manifest_tagged_image}"
+    fi
     if [[ -n ${GITHUB_SHA=} ]]; then
         # Also push image to GitHub registry with commit SHA
-        local airflow_ci_sha_image="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${COMMIT_SHA}"
+        local airflow_ci_sha_image="${AIRFLOW_CI_IMAGE}:${COMMIT_SHA}"
         docker_v tag "${AIRFLOW_CI_IMAGE}" "${airflow_ci_sha_image}"
         push_pull_remove_images::push_image_with_retries "${airflow_ci_sha_image}"
     fi
 }
 
-
-# Pushes Ci image and it's manifest to the registry.
-function push_pull_remove_images::push_ci_images() {
-    if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
-        push_pull_remove_images::push_ci_images_to_github
-    else
-        push_pull_remove_images::push_ci_images_to_dockerhub
-    fi
-}
-
-# Pushes PROD image to registry in DockerHub
-function push_pull_remove_images::push_prod_images_to_dockerhub () {
-    push_pull_remove_images::push_image_with_retries "${AIRFLOW_PYTHON_BASE_IMAGE}"
-    # Prod image
-    push_pull_remove_images::push_image_with_retries "${AIRFLOW_PROD_IMAGE}"
-    if [[ -n ${DEFAULT_PROD_IMAGE=} ]]; then
-        push_pull_remove_images::push_image_with_retries "${DEFAULT_PROD_IMAGE}"
-    fi
-    # Prod build image
-    push_pull_remove_images::push_image_with_retries "${AIRFLOW_PROD_BUILD_IMAGE}"
-
-}
-
-# Pushes PROD image to and their tags to registry in GitHub
+# Pushes PROD image to registry in GitHub
 # Push image to GitHub registry with chosen push tag
 # the PUSH tag might be:
 #     "${COMMIT_SHA}" - in case of pull-request triggered 'workflow_run' builds
 #     "latest"        - in case of push builds
 function push_pull_remove_images::push_prod_images_to_github () {
-    local airflow_prod_tagged_image="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    local airflow_prod_tagged_image="${AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
     docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_tagged_image}"
-    push_pull_remove_images::push_image_with_retries "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    push_pull_remove_images::push_image_with_retries "${airflow_prod_tagged_image}"
     if [[ -n ${COMMIT_SHA=} ]]; then
         # Also push image to GitHub registry with commit SHA
-        local airflow_prod_sha_image="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${COMMIT_SHA}"
+        local airflow_prod_sha_image="${AIRFLOW_PROD_IMAGE}:${COMMIT_SHA}"
         docker_v tag "${AIRFLOW_PROD_IMAGE}" "${airflow_prod_sha_image}"
         push_pull_remove_images::push_image_with_retries "${airflow_prod_sha_image}"
     fi
     # Also push prod build image
-    local airflow_prod_build_tagged_image="${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
+    local airflow_prod_build_tagged_image="${AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
     docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${airflow_prod_build_tagged_image}"
     push_pull_remove_images::push_image_with_retries "${airflow_prod_build_tagged_image}"
 }
 
-
-# Pushes PROD image to the registry. In case the image was taken from cache registry
-# it is also pushed to the cache, not to the main registry
-function push_pull_remove_images::push_prod_images() {
-    if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
-        push_pull_remove_images::push_prod_images_to_github
-    else
-        push_pull_remove_images::push_prod_images_to_dockerhub
-    fi
-}
-
-
 # waits for an image to be available in GitHub Container Registry. Should be run with `set +e`
-function push_pull_remove_images::check_for_image_in_github_container_registry() {
-    local image_name_in_github_registry="${1}"
-    local image_tag_in_github_registry=${2}
-
-    local image_to_wait_for="ghcr.io/${GITHUB_REPOSITORY}-${image_name_in_github_registry}:${image_tag_in_github_registry}"
+function push_pull_remove_images::check_image_manifest() {
+    local image_to_wait_for="${1}"
     echo "GitHub Container Registry: checking for ${image_to_wait_for} via docker manifest inspect!"
     docker_v manifest inspect "${image_to_wait_for}"
     local res=$?
@@ -311,28 +247,15 @@ function push_pull_remove_images::check_for_image_in_github_container_registry()
 }
 
 # waits for an image to be available in the GitHub registry
-function push_pull_remove_images::wait_for_github_registry_image() {
+function push_pull_remove_images::wait_for_image() {
     set +e
-    echo " Waiting for github registry image: " "${@}"
+    echo " Waiting for github registry image: " "$1"
     while true
     do
-        if push_pull_remove_images::check_for_image_in_github_container_registry "${@}"; then
+        if push_pull_remove_images::check_image_manifest "$1"; then
             break
         fi
         sleep 30
     done
     set -e
 }
-
-function push_pull_remove_images::check_if_github_registry_wait_for_image_enabled() {
-    if [[ ${USE_GITHUB_REGISTRY} != "true" ||  ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} != "true" ]]; then
-        echo
-        echo "This script should not be called"
-        echo "It need both USE_GITHUB_REGISTRY and GITHUB_REGISTRY_WAIT_FOR_IMAGE to true!"
-        echo
-        echo "USE_GITHUB_REGISTRY = ${USE_GITHUB_REGISTRY}"
-        echo "GITHUB_REGISTRY_WAIT_FOR_IMAGE =${GITHUB_REGISTRY_WAIT_FOR_IMAGE}"
-        echo
-        exit 1
-    fi
-}
diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/tools/build_dockerhub.sh
similarity index 73%
rename from scripts/ci/images/ci_build_dockerhub.sh
rename to scripts/ci/tools/build_dockerhub.sh
index b464cf2..c520939 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/tools/build_dockerhub.sh
@@ -28,10 +28,8 @@ export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
 export INSTALL_PROVIDERS_FROM_SOURCES="false"
 export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
 export DOCKER_CACHE="local"
-export FORCE_PULL_BASE_PYTHON_IMAGE="true"
+export CHECK_IF_BASE_PYTHON_IMAGE_UPDATED="true"
 export DOCKER_TAG=${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}
-# Name the image based on the TAG rather than based on the branch name
-export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
 export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}"
 export AIRFLOW_CONSTRAINTS="constraints"
 # shellcheck source=scripts/ci/libraries/_script_init.sh
@@ -44,19 +42,20 @@ rm -rf "${AIRFLOW_SOURCES}/docker-context-files/*"
 build_images::prepare_prod_build
 build_images::build_prod_images
 verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}"
+
+export RELEASE_IMAGE="apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
 echo
-echo "Pushing airflow image as apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
+echo "Pushing airflow PROD image as ${RELEASE_IMAGE}"
 echo
 # Re-tag the image to be published in "apache/airflow"
-docker tag "apache/airflow-ci:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}" \
-     "apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
-docker push "apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
+docker tag "${AIRFLOW_PROD_IMAGE}" "${RELEASE_IMAGE}"
+docker push "${RELEASE_IMAGE}"
 if [[ ${PYTHON_MAJOR_MINOR_VERSION} == "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" ]]; then
+    export DEFAULT_VERSION_IMAGE="apache/airflow:${INSTALL_AIRFLOW_VERSION}"
     echo
-    echo "Pushing default airflow image as apache/airflow:${INSTALL_AIRFLOW_VERSION}"
+    echo "Pushing default airflow image as ${DEFAULT_VERSION_IMAGE}"
     echo
     # In case of default Python version we also push ":version" tag
-    docker tag "apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}" \
-        "apache/airflow:${INSTALL_AIRFLOW_VERSION}"
-    docker push "apache/airflow:${INSTALL_AIRFLOW_VERSION}"
+    docker tag "${RELEASE_IMAGE}" "${DEFAULT_VERSION_IMAGE}"
+    docker push "${DEFAULT_VERSION_IMAGE}"
 fi
diff --git a/scripts/ci/tools/ci_clear_tmp.sh b/scripts/ci/tools/ci_clear_tmp.sh
deleted file mode 100755
index bef3fa5..0000000
--- a/scripts/ci/tools/ci_clear_tmp.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# Fixes ownership for files created inside container (files owned by root will be owned by host user)
-#
-# shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
-
-declare -a EXTRA_DOCKER_FLAGS
-
-sanity_checks::sanitize_mounted_files
-
-read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)"
-
-docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
-    --rm \
-    --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
-    "${AIRFLOW_CI_IMAGE}" \
-    -c /opt/airflow/scripts/in_container/run_clear_tmp.sh
diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/fix_ownership.sh
similarity index 100%
rename from scripts/ci/tools/ci_fix_ownership.sh
rename to scripts/ci/tools/fix_ownership.sh
diff --git a/scripts/ci/tools/ci_free_space_on_ci.sh b/scripts/ci/tools/free_space.sh
similarity index 100%
rename from scripts/ci/tools/ci_free_space_on_ci.sh
rename to scripts/ci/tools/free_space.sh
diff --git a/scripts/ci/tools/prepare_prod_docker_images.sh b/scripts/ci/tools/prepare_prod_docker_images.sh
index ad6cc95..928f282 100755
--- a/scripts/ci/tools/prepare_prod_docker_images.sh
+++ b/scripts/ci/tools/prepare_prod_docker_images.sh
@@ -41,5 +41,5 @@ export INSTALL_AIRFLOW_VERSION="${1}"
 for python_version in "3.6" "3.7" "3.8" "3.9"
 do
   export PYTHON_MAJOR_MINOR_VERSION=${python_version}
-  "${AIRFLOW_SOURCES_DIR}/scripts/ci/images/ci_build_dockerhub.sh"
+  "${AIRFLOW_SOURCES_DIR}/scripts/ci/tools/build_dockerhub.sh"
 done
diff --git a/scripts/in_container/airflow_ci.cfg b/scripts/in_container/airflow_ci.cfg
index 886f98e..60b6066 100644
--- a/scripts/in_container/airflow_ci.cfg
+++ b/scripts/in_container/airflow_ci.cfg
@@ -44,6 +44,7 @@ smtp_mail_from = airflow@example.com
 broker_url = amqp://guest:guest@rabbitmq:5672/
 result_backend = # overridden by startup scripts
 
+
 [celery_broker_transport_options]
 visibility_timeout = 21600
 _test_only_bool = True
diff --git a/scripts/in_container/run_clear_tmp.sh b/scripts/in_container/run_clear_tmp.sh
deleted file mode 100755
index a94923c..0000000
--- a/scripts/in_container/run_clear_tmp.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
-
-in_container_clear_tmp
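
For reference, a minimal sketch of how the ghcr.io image names introduced above are
composed (repository, branch and Python version are assumed example values, not taken
from this patch):

    #!/usr/bin/env bash
    # Sketch: derive the CI image name the same way the functions above do.
    GITHUB_REGISTRY="ghcr.io"
    GITHUB_REPOSITORY="apache/airflow"       # assumed example repository
    BRANCH_NAME="main"                       # assumed example branch
    PYTHON_MAJOR_MINOR_VERSION="3.8"         # assumed example Python version
    GITHUB_REGISTRY_IMAGE_SUFFIX="-v2"

    # GHCR stores images at the organisation level, so the repository name is
    # folded into the image name and lower-cased.
    image_name="${GITHUB_REGISTRY}/$(echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]')"
    AIRFLOW_CI_BASE_TAG="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
    AIRFLOW_CI_IMAGE="${image_name}-${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}"

    echo "${AIRFLOW_CI_IMAGE}"   # prints: ghcr.io/apache/airflow-main-python3.8-ci-v2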

[airflow] 09/29: Breeze should work with new docker-compose fallback (#16743)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 811f516da3390bc52b8d43674eabf02073990c1b
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Jul 1 16:34:31 2021 +0200

    Breeze should work with new docker-compose fallback (#16743)
    
    The new Docker Desktop beta brings a new docker v2 implementation
    with docker-compose becoming a docker sub-command. It also provides a
    fallback to the docker-compose command, but adding --log-level messes
    up the alias it uses. The --log-level INFO option was superfluous
    anyway, so we can get rid of it.
    
    (cherry picked from commit 258691ac448087c89d015680387822bc3c1e47e9)
---
 breeze | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/breeze b/breeze
index 173e619..9a352fd 100755
--- a/breeze
+++ b/breeze
@@ -637,7 +637,7 @@ export SQLITE_URL="${SQLITE_URL}"
 export USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION}"
 export USE_PACKAGES_FROM_DIST="${USE_PACKAGES_FROM_DIST}"
 export EXECUTOR="${EXECUTOR}"
-docker-compose --log-level INFO ${command}
+docker-compose ${command}
 EOF
     chmod u+x "${file}"
 }
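
As a rough illustration of the fallback issue described in the commit message (a sketch
with assumed output, not part of the patch), the safe pattern is to avoid v1-only global
flags such as --log-level so the same invocation works with both flavours:

    #!/usr/bin/env bash
    # Works with classic docker-compose v1 as well as with the `docker compose`
    # fallback alias shipped with the Docker Desktop beta.
    docker-compose --version
    docker-compose config --services    # no extra global flags needed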

[airflow] 25/29: Fix bug and small improvements in scripts/tools/list-integrations.py (#17004)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 672959cd759dd683fb920f45d100de20ef76351d
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Thu Jul 15 09:39:23 2021 +0200

    Fix bug and small improvements in scripts/tools/list-integrations.py (#17004)
    
    (cherry picked from commit 364518432751f0e8c54a742f526c0ffbc216576b)
---
 scripts/tools/list-integrations.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/scripts/tools/list-integrations.py b/scripts/tools/list-integrations.py
index 3df6151..5a5822e 100755
--- a/scripts/tools/list-integrations.py
+++ b/scripts/tools/list-integrations.py
@@ -29,10 +29,12 @@ from airflow.models.baseoperator import BaseOperator
 from airflow.secrets import BaseSecretsBackend
 from airflow.sensors.base import BaseSensorOperator
 
+program = f"./{__file__}" if not __file__.startswith("./") else __file__
+
 if __name__ != "__main__":
     raise Exception(
-        "This file is intended to be executed as an executable program. You cannot use it as a module."
-        "To run this script, run the './list-integrations.py' command"
+        "This file is intended to be used as an executable program. You cannot use it as a module."
+        f"To execute this script, run the '{program}' command"
     )
 
 AIRFLOW_ROOT = os.path.abspath(os.path.join(os.path.dirname(airflow.__file__), os.pardir))
@@ -66,8 +68,6 @@ def _find_clazzes(directory, base_class):
     return found_classes
 
 
-program = "./" + os.path.basename(sys.argv[0])
-
 HELP = """\
 List operators, hooks, sensors, secrets backend in the installed Airflow.
 
@@ -96,7 +96,7 @@ If you want to count the operators/sensors in each providers package, you can us
 """
 
 parser = argparse.ArgumentParser(
-    description=HELP, formatter_class=argparse.RawTextHelpFormatter, epilog=EPILOG
+    prog=program, description=HELP, formatter_class=argparse.RawTextHelpFormatter, epilog=EPILOG
 )
 # argparse handle `-h/--help/` internally
 parser.parse_args()
@@ -115,5 +115,5 @@ for integration_base_directory, integration_class in RESOURCE_TYPES.items():
         if "contrib" in integration_directory:
             continue
 
-        for clazz_to_print in sorted(_find_clazzes(integration_base_directory, integration_class)):
+        for clazz_to_print in sorted(_find_clazzes(integration_directory, integration_class)):
             print(clazz_to_print)
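
Example usage of the fixed script (a sketch; it assumes you run it from the root of the
Airflow sources with Airflow installed):

    # List all operators, hooks, sensors and secrets backends found in the installed Airflow
    ./scripts/tools/list-integrations.py

    # Count them
    ./scripts/tools/list-integrations.py | wc -l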

[airflow] 15/29: Remove cache for kubernetes tests (#16927)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 7607ad7719eafba3aa08e7738c576843a3fb2934
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sun Jul 11 15:21:00 2021 +0200

    Remove cache for kubernetes tests (#16927)
    
    Different Python versions are used for different k8s tests,
    so we should not attempt to cache the venv for those tests;
    otherwise they will randomly fail.
    
    (cherry picked from commit e8e9a6da12e44d049095fc2b1703a4024dcd75c3)
---
 .github/workflows/ci.yml | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 609b1c4..d13a6d5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -934,17 +934,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
         env:
           VERIFY_IMAGE: "false"
-      - name: "Cache virtualenv for kubernetes testing"
-        uses: actions/cache@v2
-        with:
-          path: ".build/.kubernetes_venv"
-          key: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}\
--${{needs.build-info.outputs.kubernetesVersionsListAsString}}
--${{needs.build-info.outputs.pythonVersionsListAsString}}
--${{ hashFiles('setup.py','setup.cfg') }}"
-          restore-keys: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}-\
--${{needs.build-info.outputs.kubernetesVersionsListAsString}}
--${{needs.build-info.outputs.pythonVersionsListAsString}}"
       - name: "Cache bin folder with tools for kubernetes testing"
         uses: actions/cache@v2
         with:

[airflow] 24/29: Drop support for Airflow 1.10 in entrypoint_prod.sh and improve MSSQL compatibility (#17011)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 5ca1a2e9db52b320a057e072b8adc0ddac08d818
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Thu Jul 15 07:37:52 2021 +0200

    Drop support for Airflow 1.10 in entrypoint_prod.sh and improve MSSQL compatibility (#17011)
    
    (cherry picked from commit cc33d7e513e0f66a94a6e6277d6d30c08de94d64)
---
 docs/docker-stack/entrypoint.rst             | 26 ++++-----------------
 scripts/in_container/prod/entrypoint_prod.sh | 34 ++++------------------------
 2 files changed, 9 insertions(+), 51 deletions(-)

diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst
index c386a67..0a0de5d 100644
--- a/docs/docker-stack/entrypoint.rst
+++ b/docs/docker-stack/entrypoint.rst
@@ -94,32 +94,14 @@ You can read more about it in the "Support arbitrary user ids" chapter in the
 Waits for Airflow DB connection
 -------------------------------
 
-In case Postgres or MySQL DB is used, the entrypoint will wait until the airflow DB connection becomes
-available. This happens always when you use the default entrypoint.
+The entrypoint waits for a connection to the database regardless of the database engine used. This allows us to increase
+the stability of the environment.
 
-The script detects backend type depending on the URL schema and assigns default port numbers if not specified
-in the URL. Then it loops until the connection to the host/port specified can be established
+Waiting for the connection involves executing the ``airflow db check`` command, which means that a ``select 1 as is_alive;`` statement
+is executed. Then it loops until the command succeeds.
 It tries :envvar:`CONNECTION_CHECK_MAX_COUNT` times and sleeps :envvar:`CONNECTION_CHECK_SLEEP_TIME` between checks
 To disable check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
 
-Supported schemes:
-
-* ``postgres://`` - default port 5432
-* ``mysql://``    - default port 3306
-* ``sqlite://``
-
-In case of SQLite backend, there is no connection to establish and waiting is skipped.
-
-For older than Airflow 1.10.14, waiting for connection involves checking if a matching port is open.
-The host information is derived from the variables :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN` and
-:envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD`. If :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD` variable
-is passed to the container, it is evaluated as a command to execute and result of this evaluation is used
-as :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN`. The :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD` variable
-takes precedence over the :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN` variable.
-
-For newer versions, the ``airflow db check`` command is used, which means that a ``select 1 as is_alive;`` query
-is executed. This also means that you can keep your password in secret backend.
-
 Waits for celery broker connection
 ----------------------------------
 
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index adeff61..9d8e74a 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -153,13 +153,6 @@ function create_www_user() {
        --lastname "${_AIRFLOW_WWW_USER_LASTNME="Admin"}" \
        --email "${_AIRFLOW_WWW_USER_EMAIL="airflowadmin@example.com"}" \
        --role "${_AIRFLOW_WWW_USER_ROLE="Admin"}" \
-       --password "${local_password}" ||
-    airflow create_user \
-       --username "${_AIRFLOW_WWW_USER_USERNAME="admin"}" \
-       --firstname "${_AIRFLOW_WWW_USER_FIRSTNAME="Airflow"}" \
-       --lastname "${_AIRFLOW_WWW_USER_LASTNME="Admin"}" \
-       --email "${_AIRFLOW_WWW_USER_EMAIL="airflowadmin@example.com"}" \
-       --role "${_AIRFLOW_WWW_USER_ROLE="Admin"}" \
        --password "${local_password}" || true
 }
 
@@ -193,30 +186,13 @@ function set_pythonpath_for_root_user() {
 }
 
 function wait_for_airflow_db() {
-    # Check if Airflow has a command to check the connection to the database.
-    if ! airflow db check --help >/dev/null 2>&1; then
-        run_check_with_retries "airflow db check"
-    else
-        # Verify connections to the Airflow DB by guessing the database address based on environment variables,
-        # then uses netcat to check that the host is reachable.
-        # This is only used by Airflow 1.10+ as there are no built-in commands to check the db connection.
-        local connection_url
-        if [[ -n "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD=}" ]]; then
-            connection_url="$(eval "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD}")"
-        else
-            # if no DB configured - use sqlite db by default
-            connection_url="${AIRFLOW__CORE__SQL_ALCHEMY_CONN:="sqlite:///${AIRFLOW_HOME}/airflow.db"}"
-        fi
-        # SQLite doesn't require a remote connection, so we don't have to wait.
-        if [[ ${connection_url} != sqlite* ]]; then
-            wait_for_connection "${connection_url}"
-        fi
-    fi
+    # Wait for the command to run successfully to validate the database connection.
+    run_check_with_retries "airflow db check"
 }
 
 function upgrade_db() {
     # Runs airflow db upgrade
-    airflow db upgrade || airflow upgradedb || true
+    airflow db upgrade || true
 }
 
 function wait_for_celery_backend() {
@@ -232,8 +208,8 @@ function wait_for_celery_backend() {
 }
 
 function exec_to_bash_or_python_command_if_specified() {
-    # If one of the commands: 'airflow', 'bash', 'python' is used, either run appropriate
-    # command with exec or update the command line parameters
+    # If one of the commands 'bash' or 'python' is used, run the appropriate
+    # command with exec
     if [[ ${AIRFLOW_COMMAND} == "bash" ]]; then
        shift
        exec "/bin/bash" "${@}"

[airflow] 29/29: Fixes detection of version 2 of docker-compose (#17062)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 91007efc02b224ee40ef38d0ac8d131615e406d9
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sat Jul 17 22:18:27 2021 +0200

    Fixes detection of version 2 of docker-compose (#17062)
    
    Docker Compose 2 added a `v` in front of the version it reports :(
    
    (cherry picked from commit 16564cad6f2956ecb842455d9d6a6255f8d3d817)
---
 breeze | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/breeze b/breeze
index d048bd8..7decdf6 100755
--- a/breeze
+++ b/breeze
@@ -591,9 +591,9 @@ function breeze::prepare_command_file() {
     cat <<EOF >"${file}"
 #!/usr/bin/env bash
 docker_compose_version=\$(docker-compose --version)
-if [[ \${docker_compose_version} =~ .*version\ 2.* ]]; then
+if [[ \${docker_compose_version} =~ .*version\ v2.* ]]; then
   echo
-  echo "${COLOR_RED}Docker Compose Beta version 2has bug that prevents breeze from running.${COLOR_RESET}"
+  echo "${COLOR_RED}Docker Compose Beta version v2 has a bug that prevents breeze from running.${COLOR_RESET}"
   echo "${COLOR_RED}You have: \${docker_compose_version}.${COLOR_RESET}"
   echo
   echo "${COLOR_YELLOW}Please switch to stable version via Docker Desktop -> Experimental or by running:${COLOR_RESET}"

[airflow] 08/29: Fix timing out tests for public GitHub Runners. (#16750)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit fa813c2b87cdc6b85ef8159eefe5d3aa2c254019
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Jul 1 14:29:52 2021 +0200

    Fix timing out tests for public GitHub Runners. (#16750)
    
    This PR:
    
    * upgrades kind to the latest version, which includes security fixes
    * increases timeouts to account for low-resource GitHub Runners
    
    Fixes: #16736
    (cherry picked from commit e40c5a268d8dc24d1e6b00744308ef705224cb66)
---
 BREEZE.rst                              | 4 ++--
 breeze-complete                         | 2 +-
 scripts/ci/libraries/_initialization.sh | 2 +-
 scripts/ci/libraries/_kind.sh           | 4 +++-
 4 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index 2082744a..d199367 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -2444,9 +2444,9 @@ This is the current syntax for  `./breeze <./breeze>`_:
           Kind version - only used in case one of kind-cluster commands is used.
           One of:
 
-                 v0.10.0
+                 v0.11.1
 
-          Default: v0.10.0
+          Default: v0.11.1
 
   --helm-version HELM_VERSION
           Helm version - only used in case one of kind-cluster commands is used.
diff --git a/breeze-complete b/breeze-complete
index 9e21c2c..65a3ee0 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -32,7 +32,7 @@ _breeze_allowed_github_registrys="ghcr.io docker.pkg.github.com"
 _breeze_allowed_kubernetes_modes="image"
 _breeze_allowed_kubernetes_versions="v1.20.2 v1.19.7 v1.18.15"
 _breeze_allowed_helm_versions="v3.2.4"
-_breeze_allowed_kind_versions="v0.10.0"
+_breeze_allowed_kind_versions="v0.11.1"
 _breeze_allowed_mysql_versions="5.7 8"
 _breeze_allowed_postgres_versions="9.6 10 11 12 13"
 _breeze_allowed_kind_operations="start stop restart status deploy test shell k9s"
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 034cd37..2b5458e 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -489,7 +489,7 @@ function initialization::initialize_kubernetes_variables() {
     CURRENT_KUBERNETES_MODES+=("image")
     export CURRENT_KUBERNETES_MODES
     # Currently supported versions of Kind
-    CURRENT_KIND_VERSIONS+=("v0.10.0")
+    CURRENT_KIND_VERSIONS+=("v0.11.1")
     export CURRENT_KIND_VERSIONS
     # Currently supported versions of Helm
     CURRENT_HELM_VERSIONS+=("v3.2.4")
diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index 085dfac..7e06ed3 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -335,7 +335,9 @@ function kind::deploy_airflow_with_helm() {
     pushd "${chartdir}/chart" >/dev/null 2>&1 || exit 1
     helm repo add stable https://charts.helm.sh/stable/
     helm dep update
-    helm install airflow . --namespace "${HELM_AIRFLOW_NAMESPACE}" \
+    helm install airflow . \
+        --timeout 10m0s \
+        --namespace "${HELM_AIRFLOW_NAMESPACE}" \
         --set "defaultAirflowRepository=${DOCKERHUB_USER}/${DOCKERHUB_REPO}" \
         --set "images.airflow.repository=${DOCKERHUB_USER}/${DOCKERHUB_REPO}" \
         --set "images.airflow.tag=${AIRFLOW_PROD_BASE_TAG}-kubernetes" -v 1 \

[airflow] 05/29: Use different executors for Helm Chart tests in CI (#15791)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit bc49f1f25e2336ff3d457b5989f5c695a42b81f7
Author: Ephraim Anierobi <sp...@gmail.com>
AuthorDate: Mon May 17 22:50:23 2021 +0100

    Use different executors for Helm Chart tests in CI (#15791)
    
    closes https://github.com/apache/airflow/issues/14301
    
    (cherry picked from commit 2a7298d6fc8a4f587ce51d9a2f75aa23231343c3)
---
 .github/workflows/ci.yml                                            | 6 +++++-
 kubernetes_tests/test_kubernetes_pod_operator.py                    | 2 ++
 kubernetes_tests/test_kubernetes_pod_operator_backcompat.py         | 5 ++++-
 .../kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh | 1 +
 .../ci_setup_cluster_and_run_kubernetes_tests_single_job.sh         | 1 +
 .../ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh       | 1 -
 scripts/ci/libraries/_initialization.sh                             | 4 ++--
 scripts/ci/libraries/_kind.sh                                       | 4 ----
 8 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 569dc17..4a8a209 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -915,15 +915,19 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
 
   tests-kubernetes:
     timeout-minutes: 50
-    name: Helm Chart
+    name: Helm Chart; ${{matrix.executor}}
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, prod-images]
+    strategy:
+      matrix:
+        executor: [KubernetesExecutor, CeleryExecutor, LocalExecutor]
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: postgres
       RUN_TESTS: "true"
       RUNTIME: "kubernetes"
       KUBERNETES_MODE: "image"
+      EXECUTOR: ${{matrix.executor}}
       KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
       HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
       GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py
index 2da5da0..9d32782 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator.py
@@ -40,6 +40,7 @@ from airflow.providers.cncf.kubernetes.utils.pod_launcher import PodLauncher
 from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
 from airflow.utils import timezone
 from airflow.version import version as airflow_version
+from kubernetes_tests.test_base import EXECUTOR
 
 
 def create_context(task):
@@ -62,6 +63,7 @@ def get_kubeconfig_path():
     return kubeconfig_path if kubeconfig_path else os.path.expanduser('~/.kube/config')
 
 
+@pytest.mark.skipif(EXECUTOR != 'KubernetesExecutor', reason="Only runs on KubernetesExecutor")
 class TestKubernetesPodOperatorSystem(unittest.TestCase):
     def get_current_task_name(self):
         # reverse test name to make pod name unique (it has limited length)
diff --git a/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py b/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py
index ea9c9ee..f2058c3 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator_backcompat.py
@@ -41,9 +41,11 @@ from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
 from airflow.utils import timezone
 from airflow.utils.state import State
 from airflow.version import version as airflow_version
-
+from kubernetes_tests.test_base import EXECUTOR
 
 # noinspection DuplicatedCode
+
+
 def create_context(task):
     dag = DAG(dag_id="dag")
     tzinfo = pendulum.timezone("Europe/Amsterdam")
@@ -60,6 +62,7 @@ def create_context(task):
 
 
 # noinspection DuplicatedCode,PyUnusedLocal
+@pytest.mark.skipif(EXECUTOR != 'KubernetesExecutor', reason="Only runs on KubernetesExecutor")
 class TestKubernetesPodOperatorSystem(unittest.TestCase):
     def get_current_task_name(self):
         # reverse test name to make pod name unique (it has limited length)
diff --git a/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh b/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh
index 1e0fa36..fce0a4c 100755
--- a/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh
+++ b/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
+
 export SKIP_BUILDING_PROD_IMAGE="true"
 
 # shellcheck source=scripts/ci/libraries/_script_init.sh
diff --git a/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh b/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh
index 9b0d86f..914e426 100755
--- a/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh
+++ b/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh
@@ -30,6 +30,7 @@ fi
 export PYTHON_MAJOR_MINOR_VERSION=$1
 shift
 
+
 # Requires PARALLEL_JOB_STATUS
 
 if [[ -z "${PARALLEL_JOB_STATUS=}" ]]; then
diff --git a/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh b/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh
index 88aa2cd..5790b94 100755
--- a/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh
+++ b/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh
@@ -101,6 +101,5 @@ parallel::make_sure_kubernetes_versions_are_specified
 get_maximum_parallel_k8s_jobs
 
 run_k8s_tests_in_parallel "${@}"
-
 # this will exit with error code in case some of the tests failed
 parallel::print_job_summary_and_return_status_code
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 0bc9dc1..034cd37 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -25,6 +25,7 @@ CURRENT_POSTGRES_VERSIONS=()
 CURRENT_MYSQL_VERSIONS=()
 CURRENT_KIND_VERSIONS=()
 CURRENT_HELM_VERSIONS=()
+CURRENT_EXECUTOR=()
 ALL_PYTHON_MAJOR_MINOR_VERSIONS=()
 INSTALLED_PROVIDERS=()
 
@@ -790,9 +791,8 @@ function initialization::make_constants_read_only() {
     readonly KUBERNETES_VERSION
     readonly KIND_VERSION
     readonly HELM_VERSION
-    readonly EXECUTOR
     readonly KUBECTL_VERSION
-
+    readonly EXECUTOR
     readonly POSTGRES_VERSION
     readonly MYSQL_VERSION
 
diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index d5adadf..085dfac 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -140,10 +140,6 @@ function kind::perform_kind_cluster_operation() {
     echo "Kubernetes mode: ${KUBERNETES_MODE}"
     echo
 
-    echo
-    echo "Executor: ${EXECUTOR}"
-    echo
-
     if [[ ${operation} == "status" ]]; then
         if [[ ${all_clusters} == *"${KIND_CLUSTER_NAME}"* ]]; then
             echo

[airflow] 20/29: Fixes typo in the name of file for Breeze docker compose env (#16971)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f5c64aae15a193f430f09b8c343109b1a49d8165
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Tue Jul 13 13:47:18 2021 +0200

    Fixes typo in the name of file for Breeze docker compose env (#16971)
    
    There was a typo in #16959
    
    (cherry picked from commit eeb45733e70442122c50e99a5ac5fac38c96d080)
---
 scripts/ci/docker-compose/base.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index d01368c..a9ae734 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -26,7 +26,7 @@ services:
       - KUBECONFIG=/files/.kube/config
       - HOST_HOME=${HOME}
     env_file:
-      - _docke_compose.env
+      - _docker_compose.env
     volumes:
       # Pass docker to inside of the container so that Kind and Moto tests can use it.
       - /var/run/docker.sock:/var/run/docker.sock

[airflow] 13/29: Fix Airflow releasing guide (#16924)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit b7493b6957aa66cfeead2829d7f1d43949c0d556
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Sun Jul 11 08:22:36 2021 +0100

    Fix Airflow releasing guide (#16924)
    
    Fix Airflow releasing guide with some minor issues
    
    (cherry picked from commit d9f39bbc4d5f025a7643d3e66d38c5a6e4539b9b)
---
 dev/README_RELEASE_AIRFLOW.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index a281cda..72988a9 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -84,13 +84,13 @@ The Release Candidate artifacts we vote upon should be the exact ones we vote ag
 - Clean the checkout: the sdist step below will
 
     ```shell script
-    rm -rf dist/*
     git clean -fxd
     ```
 
 - Tarball the repo
 
     ```shell script
+    mkdir dist
     git archive --format=tar.gz ${VERSION} \
         --prefix=apache-airflow-${VERSION_WITHOUT_RC}/ \
         -o dist/apache-airflow-${VERSION_WITHOUT_RC}-source.tar.gz
@@ -178,7 +178,7 @@ To do this we need to
 - Build the package:
 
     ```shell script
-    ./breeze prepare-airflow-package --version-suffix-for-pypi "${VERSION_SUFFIX}"
+    ./breeze prepare-airflow-packages --version-suffix-for-pypi "${VERSION_SUFFIX}" --package-format both
     ```
 
 - Verify the artifacts that would be uploaded:

[airflow] 19/29: Fixes passing variables via docker --env-file command (#16959)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e3ef80de9e4736e55ca5de0e687413b58aa8f13b
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Tue Jul 13 08:51:53 2021 +0200

    Fixes passing variables via docker --env-file command (#16959)
    
    The change in #16950 aimed to fix an incompatibility introduced by the
    beta version of docker-compose v2 (which is now automatically pushed
    to macOS users).

    The issue is documented in
    https://github.com/docker/compose-cli/issues/1917

    Unfortunately, it has an undesired side effect: the same file can no
    longer be used to specify the list of variables for the docker command
    (the variables come through empty).

    Until the problem is solved, we need to keep two copies of those
    variable files. Not ideal, but hopefully the issue will be resolved
    soon and we can go back to the original env file with docker-compose v2.
    
    (cherry picked from commit 72781c0b4548d35a37fbbef9c2417f94d2025f8f)
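
A quick hedged illustration (file names below are made up, not the real Breeze files) of the docker CLI behaviour that forces the split: with `docker run --env-file`, a line containing only a variable name forwards the value from the invoking shell, while a `NAME=` line overrides it with an empty value.

    # illustrative sketch - requires a local docker daemon
    export BACKEND=postgres

    printf 'BACKEND\n'  > pass_through.env   # bare name: host value is forwarded
    printf 'BACKEND=\n' > empty_value.env    # NAME=     : value comes through empty

    docker run --rm --env-file pass_through.env busybox sh -c 'echo "BACKEND=$BACKEND"'   # BACKEND=postgres
    docker run --rm --env-file empty_value.env  busybox sh -c 'echo "BACKEND=$BACKEND"'   # BACKEND=
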
---
 scripts/ci/docker-compose/_docker.env              | 96 +++++++++++-----------
 .../{_docker.env => _docker_compose.env}           |  6 ++
 scripts/ci/docker-compose/base.yml                 |  2 +-
 3 files changed, 55 insertions(+), 49 deletions(-)

diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env
index c88300a..6e8a5c5 100644
--- a/scripts/ci/docker-compose/_docker.env
+++ b/scripts/ci/docker-compose/_docker.env
@@ -14,51 +14,51 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-AIRFLOW_CI_IMAGE=
-AIRFLOW_EXTRAS=
-BACKEND=
-BREEZE=
-CI=
-CI_BUILD_ID=
-CI_JOB_ID=
-CI_EVENT_TYPE=
-CI_TARGET_REPO=
-CI_TARGET_BRANCH=
-COMMIT_SHA=
-DB_RESET=
-DEFAULT_CONSTRAINTS_BRANCH=
-ENABLED_INTEGRATIONS=
-ENABLED_SYSTEMS=
-GITHUB_ACTIONS=
-GITHUB_REGISTRY_PULL_IMAGE_TAG=
-HOST_USER_ID=
-HOST_GROUP_ID=
-HOST_OS=
-HOST_HOME=
-INIT_SCRIPT_FILE=
-INSTALL_AIRFLOW_VERSION=
-GENERATE_CONSTRAINTS_MODE=
-INSTALL_PROVIDERS_FROM_SOURCES=
-USE_AIRFLOW_VERSION=
-USE_PACKAGES_FROM_DIST=
-ISSUE_ID=
-LOAD_DEFAULT_CONNECTIONS=
-LOAD_EXAMPLES=
-MYSQL_VERSION=
-NUM_RUNS=
-PACKAGE_FORMAT=
-POSTGRES_VERSION=
-PRINT_INFO_FROM_SCRIPTS=
-PYTHONDONTWRITEBYTECODE=
-PYTHON_MAJOR_MINOR_VERSION=
-RUN_TESTS=
-RUN_INTEGRATION_TESTS=
-RUN_SYSTEM_TESTS=
-START_AIRFLOW=
-TEST_TYPE=
-UPGRADE_TO_NEWER_DEPENDENCIES=
-VERBOSE=
-VERBOSE_COMMANDS=
-VERSION_SUFFIX_FOR_PYPI=
-VERSION_SUFFIX_FOR_SVN=
-WHEEL_VERSION=
+AIRFLOW_CI_IMAGE
+AIRFLOW_EXTRAS
+BACKEND
+BREEZE
+CI
+CI_BUILD_ID
+CI_JOB_ID
+CI_EVENT_TYPE
+CI_TARGET_REPO
+CI_TARGET_BRANCH
+COMMIT_SHA
+DB_RESET
+DEFAULT_CONSTRAINTS_BRANCH
+ENABLED_INTEGRATIONS
+ENABLED_SYSTEMS
+GITHUB_ACTIONS
+GITHUB_REGISTRY_PULL_IMAGE_TAG
+HOST_USER_ID
+HOST_GROUP_ID
+HOST_OS
+HOST_HOME
+INIT_SCRIPT_FILE
+INSTALL_AIRFLOW_VERSION
+GENERATE_CONSTRAINTS_MODE
+INSTALL_PROVIDERS_FROM_SOURCES
+USE_AIRFLOW_VERSION
+USE_PACKAGES_FROM_DIST
+ISSUE_ID
+LOAD_DEFAULT_CONNECTIONS
+LOAD_EXAMPLES
+MYSQL_VERSION
+NUM_RUNS
+PACKAGE_FORMAT
+POSTGRES_VERSION
+PRINT_INFO_FROM_SCRIPTS
+PYTHONDONTWRITEBYTECODE
+PYTHON_MAJOR_MINOR_VERSION
+RUN_TESTS
+RUN_INTEGRATION_TESTS
+RUN_SYSTEM_TESTS
+START_AIRFLOW
+TEST_TYPE
+UPGRADE_TO_NEWER_DEPENDENCIES
+VERBOSE
+VERBOSE_COMMANDS
+VERSION_SUFFIX_FOR_PYPI
+VERSION_SUFFIX_FOR_SVN
+WHEEL_VERSION
diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker_compose.env
similarity index 85%
copy from scripts/ci/docker-compose/_docker.env
copy to scripts/ci/docker-compose/_docker_compose.env
index c88300a..b842270 100644
--- a/scripts/ci/docker-compose/_docker.env
+++ b/scripts/ci/docker-compose/_docker_compose.env
@@ -14,6 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+#
+# Until https://github.com/docker/compose-cli/issues/1917 is resolved
+# we need to keep this docker_compose.env separately to account for the different
+# behaviour of Docker-Compose V2 Beta (which became the default for macOS users updating
+# their Docker Desktop to a newer version).
+#
 AIRFLOW_CI_IMAGE=
 AIRFLOW_EXTRAS=
 BACKEND=
diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index 34cc100..d01368c 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -26,7 +26,7 @@ services:
       - KUBECONFIG=/files/.kube/config
       - HOST_HOME=${HOME}
     env_file:
-      - _docker.env
+      - _docke_compose.env
     volumes:
       # Pass docker to inside of the container so that Kind and Moto tests can use it.
       - /var/run/docker.sock:/var/run/docker.sock

[airflow] 10/29: Switches to ghcr.io container registry (#16775)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 061ab9b26f530daf48a959237b6eb00001f593c4
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Fri Jul 2 15:26:38 2021 +0200

    Switches to ghcr.io container registry (#16775)
    
    After fixing permission problems, we can now switch to ghcr.io
    
    (cherry picked from commit d56a2b407e93664bd78dae01c5b8f7103489c370)
---
 .github/workflows/build-images.yml | 2 +-
 .github/workflows/ci.yml           | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 664d527..9097241 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -47,7 +47,7 @@ env:
   GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
   GITHUB_REGISTRY_WAIT_FOR_IMAGE: "false"
   INSTALL_PROVIDERS_FROM_SOURCES: "true"
-  GITHUB_REGISTRY: "docker.pkg.github.com"
+  GITHUB_REGISTRY: "ghcr.io"
   TARGET_COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
 
 concurrency:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4a8a209..8b2735c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -37,7 +37,7 @@ env:
   VERBOSE: "true"
   DOCKER_CACHE: "pulled"
   USE_GITHUB_REGISTRY: "true"
-  GITHUB_REGISTRY: "docker.pkg.github.com"
+  GITHUB_REGISTRY: "ghcr.io"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
   # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the

[airflow] 03/29: Add preparation of images as part of RC preparation process (#16674)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit a1e9d43f20f7b8f4734bf183519f4f4b6990a9f3
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Jun 28 14:35:12 2021 +0200

    Add preparation of images as part of RC preparation process (#16674)
    
    (cherry picked from commit 98c12d49f37f6879e3e9fd926853f57a15ab761b)
---
 CI.rst                                  | 16 ----------------
 dev/README_RELEASE_AIRFLOW.md           | 20 +++++++++++++++++---
 scripts/ci/images/ci_build_dockerhub.sh |  5 ++++-
 scripts/ci/libraries/_build_images.sh   |  4 ++--
 4 files changed, 23 insertions(+), 22 deletions(-)

diff --git a/CI.rst b/CI.rst
index 1ab37d1..4b92d2d 100644
--- a/CI.rst
+++ b/CI.rst
@@ -573,22 +573,6 @@ Those runs and their corresponding ``Build Images`` runs are only executed in ma
 repository, they are not executed in forks - we want to be nice to the contributors and not use their
 free build minutes on GitHub Actions.
 
-Sometimes (bugs in DockerHub or prolonged periods when the scheduled builds are failing)
-the automated build for nightly main is not executed for a long time. Such builds can be manually
-prepared and pushed by a maintainer who has the rights to push images to DockerHub (committers need
-to file JIRA ticket to Apache Infra in order to get an access).
-
-.. code-block:: bash
-
-  export BRANCH=main
-  export DOCKER_REPO=docker.io/apache/airflow
-  for python_version in "3.6" "3.7" "3.8"
-  (
-    export DOCKER_TAG=${BRANCH}-python${python_version}
-    ./scripts/ci/images/ci_build_dockerhub.sh
-  )
-
-
 
 Workflows
 =========
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index 2d6a0b1..a281cda 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -22,6 +22,7 @@
 
 - [Prepare the Apache Airflow Package RC](#prepare-the-apache-airflow-package-rc)
   - [Build RC artifacts](#build-rc-artifacts)
+  - [Manually prepare production Docker Image](#manually-prepare-production-docker-image)
   - [[\Optional\] Create new release branch](#%5Coptional%5C-create-new-release-branch)
   - [Prepare PyPI convenience "snapshot" packages](#prepare-pypi-convenience-snapshot-packages)
   - [Prepare production Docker Image](#prepare-production-docker-image)
@@ -37,7 +38,7 @@
   - [Publish release to SVN](#publish-release-to-svn)
   - [Prepare PyPI "release" packages](#prepare-pypi-release-packages)
   - [Update CHANGELOG.md](#update-changelogmd)
-  - [Manually prepare production Docker Image](#manually-prepare-production-docker-image)
+  - [Manually prepare production Docker Image](#manually-prepare-production-docker-image-1)
   - [Publish documentation](#publish-documentation)
   - [Notify developers of release](#notify-developers-of-release)
   - [Update Announcements page](#update-announcements-page)
@@ -56,8 +57,9 @@ The Release Candidate artifacts we vote upon should be the exact ones we vote ag
 
     ```shell script
     # Set Version
-    export VERSION=2.0.2rc3
+    export VERSION=2.1.2rc3
     export VERSION_SUFFIX=rc3
+    export VERSION_CONSTRAINT_BRANCH=2-1
     export VERSION_WITHOUT_RC=${VERSION/rc?/}
 
     # Set AIRFLOW_REPO_ROOT to the path of your git repo
@@ -105,7 +107,7 @@ The Release Candidate artifacts we vote upon should be the exact ones we vote ag
 - Tag & Push the latest constraints files. This pushes constraints with rc suffix (this is expected)!
 
     ```shell script
-    git checkout constraints-2-0
+    git checkout constraints-${VERSION_CONSTRAINT_BRANCH}
     git tag -s "constraints-${VERSION}"
     git push origin "constraints-${VERSION}"
     ```
@@ -127,6 +129,18 @@ The Release Candidate artifacts we vote upon should be the exact ones we vote ag
     svn commit -m "Add artifacts for Airflow ${VERSION}"
     ```
 
+
+## Manually prepare production Docker Image
+
+
+```shell script
+./scripts/ci/tools/prepare_prod_docker_images.sh ${VERSION}
+```
+
+This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean". It
+also performs image verification before pushing the images.
+
+
 ## [\Optional\] Create new release branch
 
 When you just released the `X.Y.0` version (first release of new minor version) you need to create release
diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh
index 495295a..b464cf2 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/images/ci_build_dockerhub.sh
@@ -44,13 +44,16 @@ rm -rf "${AIRFLOW_SOURCES}/docker-context-files/*"
 build_images::prepare_prod_build
 build_images::build_prod_images
 verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}"
+echo
+echo "Pushing airflow image as apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
+echo
 # Re-tag the image to be published in "apache/airflow"
 docker tag "apache/airflow-ci:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}" \
      "apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
 docker push "apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}"
 if [[ ${PYTHON_MAJOR_MINOR_VERSION} == "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" ]]; then
     echo
-    echo "Pushing default airflow image"
+    echo "Pushing default airflow image as apache/airflow:${INSTALL_AIRFLOW_VERSION}"
     echo
     # In case of default Python version we also push ":version" tag
     docker tag "apache/airflow:${INSTALL_AIRFLOW_VERSION}-python${PYTHON_MAJOR_MINOR_VERSION}" \
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index d9c77aa..0a7f6bf 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -743,8 +743,8 @@ function build_images::prepare_prod_build() {
         build_images::add_build_args_for_remote_install
     elif [[ -n "${INSTALL_AIRFLOW_VERSION=}" ]]; then
         # When --install-airflow-version is used then the image is build using released PIP package
-        # For PROD image only numeric versions are allowed
-        if [[ ! ${INSTALL_AIRFLOW_VERSION} =~ ^[0-9\.]*$ ]]; then
+        # For PROD image only numeric versions are allowed and RC candidates
+        if [[ ! ${INSTALL_AIRFLOW_VERSION} =~ ^[0-9\.]+(rc[0-9]+)?$ ]]; then
             echo
             echo  "${COLOR_RED}ERROR: Bad value for install-airflow-version: '${INSTALL_AIRFLOW_VERSION}'. Only numerical versions allowed for PROD image here'!${COLOR_RESET}"
             echo
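
As a side note, the widened check above accepts plain releases and release candidates but still rejects anything else; a minimal sketch of the same pattern (the function name is illustrative):

    check_prod_version() {
        # mirrors the regex used in build_images::prepare_prod_build above
        if [[ ${1} =~ ^[0-9\.]+(rc[0-9]+)?$ ]]; then
            echo "OK:    ${1}"
        else
            echo "ERROR: ${1} is not allowed for the PROD image"
        fi
    }
    check_prod_version "2.1.2"       # OK
    check_prod_version "2.1.2rc3"    # OK
    check_prod_version "2.2.0.dev0"  # ERROR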

[airflow] 07/29: Adding missing word to welcome message (#16726)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0c24da006508302faa3baf686a742fd617ea1465
Author: josh-fell <48...@users.noreply.github.com>
AuthorDate: Wed Jun 30 04:57:38 2021 -0400

    Adding missing word to welcome message (#16726)
    
    (cherry picked from commit df06a71bfc5e637fda38d555864b791ab5f0ad7d)
---
 scripts/in_container/run_tmux_welcome.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/in_container/run_tmux_welcome.sh b/scripts/in_container/run_tmux_welcome.sh
index 976478b..68360c6 100755
--- a/scripts/in_container/run_tmux_welcome.sh
+++ b/scripts/in_container/run_tmux_welcome.sh
@@ -19,5 +19,5 @@ cd /opt/airflow/ || exit
 clear
 echo "Welcome to your tmux based running Airflow environment (courtesy of Breeze)."
 echo
-echo "     To stop Airflow and exit tmux just 'stop_airflow'."
+echo "     To stop Airflow and exit tmux just type 'stop_airflow'."
 echo

[airflow] 02/29: Remove upstart from docs (#16672)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 00e8072db677df815fac9f72ebb66f3404ba1538
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Sat Jun 26 09:13:55 2021 +0100

    Remove upstart from docs (#16672)
    
    No modern distribution ships upstart anymore (everyone has migrated to
    systemd, or never used upstart), so these docs no longer have any value.
    
    (cherry picked from commit 6d6eae3851de5c5b209a31201d1430a995e525ad)
---
 MANIFEST.in                                    |  1 -
 docs/apache-airflow/howto/index.rst            |  1 -
 docs/apache-airflow/howto/run-with-upstart.rst | 44 --------------------------
 scripts/upstart/README                         | 33 -------------------
 scripts/upstart/airflow-flower.conf            | 34 --------------------
 scripts/upstart/airflow-scheduler.conf         | 38 ----------------------
 scripts/upstart/airflow-webserver.conf         | 34 --------------------
 scripts/upstart/airflow-worker.conf            | 34 --------------------
 8 files changed, 219 deletions(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index 9dcf610..4dc999f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -25,7 +25,6 @@ graft airflow/www
 graft airflow/www/static
 graft airflow/www/templates
 graft scripts/systemd
-graft scripts/upstart
 graft airflow/config_templates
 recursive-exclude airflow/www/node_modules *
 global-exclude __pycache__  *.pyc
diff --git a/docs/apache-airflow/howto/index.rst b/docs/apache-airflow/howto/index.rst
index 9a7e68b..efd5c48 100644
--- a/docs/apache-airflow/howto/index.rst
+++ b/docs/apache-airflow/howto/index.rst
@@ -40,7 +40,6 @@ configuring an Airflow environment.
     variable
     run-behind-proxy
     run-with-systemd
-    run-with-upstart
     use-test-config
     define_extra_link
     email-config
diff --git a/docs/apache-airflow/howto/run-with-upstart.rst b/docs/apache-airflow/howto/run-with-upstart.rst
deleted file mode 100644
index cf6bd7f..0000000
--- a/docs/apache-airflow/howto/run-with-upstart.rst
+++ /dev/null
@@ -1,44 +0,0 @@
- .. Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
- ..   http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
-
-
-Running Airflow with upstart
-============================
-
-Airflow can integrate with upstart based systems. Upstart automatically starts all airflow services for which you
-have a corresponding ``*.conf`` file in ``/etc/init`` upon system boot. On failure, upstart automatically restarts
-the process (until it reaches re-spawn limit set in a ``*.conf`` file).
-
-You can find sample upstart job files in the ``scripts/upstart`` directory.
-
-The following assumptions have been used while creating these unit files:
-
-1. Airflow will run as the following ``user:group`` ``airflow:airflow``.
-   Change ``setuid`` and ``setgid`` appropriately in ``*.conf`` if airflow runs as a different user or group
-2. These files have been tested on **Ubuntu 14.04 LTS**
-   You may have to adjust ``start on`` and ``stop on`` stanzas to make it work on other upstart systems.
-   Some of the possible options are listed in ``scripts/upstart/README``
-
-Modify ``*.conf`` files as needed and copy to ``/etc/init`` directory.
-
-You can use ``initctl`` to manually start, stop, view status of the airflow process that has been
-integrated with upstart
-
-.. code-block:: bash
-
-    initctl airflow-webserver status
diff --git a/scripts/upstart/README b/scripts/upstart/README
deleted file mode 100644
index 124c18c..0000000
--- a/scripts/upstart/README
+++ /dev/null
@@ -1,33 +0,0 @@
-The upstart files in this directory are tested on Ubuntu 14.04 LTS based systems running in VPC on AWS.
-
-Copy *.conf files to /etc/init.
-
-You can then start airflow services by using initctl start <service>. Where <service> is airflow-worker,
-airflow-webserver, airflow-scheduler, etc.
-
-Upstart automatically starts all airflow services for which you have a corresponding *.conf file in /etc/init
-upon system boot. If service process dies, upstart will automatically re-spawn it (until it hits re-spawn limit
-set in a *.conf file)
-
-You may have to adjust `start on` & `stop on` stanzas to make it work on other upstart systems. Some of the possible
-options are listed below
-
-# This should work on most Linux distributions that support upstart
-start on started network-services
-
-# This is for Ubuntu based systems which lack generic network-services job
-# Wait for a non-loopback interface before starting airflow services
-start on (local-filesystems and net-device-up IFACE!=lo)
-
-# This should work on Ubuntu 11.10 based systems
-# Start after all network interfaces are up
-start on static-network-up
-
-# If nothing else works, use this
-start on runlevel [2345]
-
-It is assumed that airflow will run under `airflow:airflow`. Change `setuid` and `setgid` in *.conf files
-if you use other user/group
-
-You can use `initctl` to manually start, stop, view status of the airflow process.  For example
-`initctl status airflow-webserver`
diff --git a/scripts/upstart/airflow-flower.conf b/scripts/upstart/airflow-flower.conf
deleted file mode 100644
index 176ffb0..0000000
--- a/scripts/upstart/airflow-flower.conf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-description "Airflow celery flower"
-
-start on started networking
-stop on (deconfiguring-networking or runlevel [016])
-
-respawn
-respawn limit 5 30
-
-setuid airflow
-setgid airflow
-
-# env AIRFLOW_CONFIG=
-# env AIRFLOW_HOME=
-# export AIRFLOW_CONFIG
-# export AIRFLOW_HOME
-
-exec usr/local/bin/airflow celery flower
diff --git a/scripts/upstart/airflow-scheduler.conf b/scripts/upstart/airflow-scheduler.conf
deleted file mode 100644
index e8bb1fe..0000000
--- a/scripts/upstart/airflow-scheduler.conf
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-description "Airflow scheduler daemon"
-
-start on started networking
-stop on (deconfiguring-networking or runlevel [016])
-
-respawn
-respawn limit 5 10
-
-setuid airflow
-setgid airflow
-
-# env AIRFLOW_CONFIG=
-# env AIRFLOW_HOME=
-# export AIRFLOW_CONFIG
-# export AIRFLOW_HOME
-
-# required setting, 0 sets it to unlimited. Scheduler will restart after every X runs
-env SCHEDULER_RUNS=5
-export SCHEDULER_RUNS
-
-exec usr/local/bin/airflow scheduler -n ${SCHEDULER_RUNS}
diff --git a/scripts/upstart/airflow-webserver.conf b/scripts/upstart/airflow-webserver.conf
deleted file mode 100644
index 241dcd9..0000000
--- a/scripts/upstart/airflow-webserver.conf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-description "Airflow webserver daemon"
-
-start on started networking
-stop on (deconfiguring-networking or runlevel [016])
-
-respawn
-respawn limit 5 30
-
-setuid airflow
-setgid airflow
-
-# env AIRFLOW_CONFIG=
-# env AIRFLOW_HOME=
-# export AIRFLOW_CONFIG
-# export AIRFLOW_HOME
-
-exec usr/local/bin/airflow webserver
diff --git a/scripts/upstart/airflow-worker.conf b/scripts/upstart/airflow-worker.conf
deleted file mode 100644
index 94c3a7c..0000000
--- a/scripts/upstart/airflow-worker.conf
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-description "Airflow celery worker daemon"
-
-start on started networking
-stop on (deconfiguring-networking or runlevel [016])
-
-respawn
-respawn limit 5 30
-
-setuid airflow
-setgid airflow
-
-# env AIRFLOW_CONFIG=
-# env AIRFLOW_HOME=
-# export AIRFLOW_CONFIG
-# export AIRFLOW_HOME
-
-exec usr/local/bin/airflow celery worker

[airflow] 27/29: Fixes "development" and "rc" cross dependencies between providers (#17023)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 61dc4cacacf1478076ab0a2fb775df684591c1b0
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Jul 15 16:17:18 2021 +0200

    Fixes "development" and "rc" cross dependencies between providers (#17023)
    
    In case we have additional dependencies between providers released
    at the same time (for example, we currently need to release the sftp
    and ssh packages together, where the sftp package depends on the ssh
    release made at the same time), we have to add a suffix to the version
    of the additional_dependency.

    Unfortunately, pip does not take into account that development
    versions should likely be considered as fulfilling a ">=" requirement.
    For example, if:

    sftp depends on ssh>=2.1.0 and you release ssh 2.1.0.dev0 at
    the same time, the ssh>=2.1.0 condition is not fulfilled.

    The same applies to rc1 versions. Therefore we need to add the suffix
    to such cross-provider dependencies to be able to install them in CI
    and in rc candidates.

    In the future we might ask pip to change its behaviour in such cases.
    
    (cherry picked from commit 07e0a67bf641d25b0237b457f2585788fa4c8ada)
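
A small hedged illustration of the pip/PEP 440 behaviour described above, using the versions from the example:

    # requires the "packaging" library (pip install packaging)
    python -c 'from packaging.specifiers import SpecifierSet; print(SpecifierSet(">=2.1.0").contains("2.1.0.dev0"))'
    # -> False: a dev build does not satisfy the plain >= pin
    python -c 'from packaging.specifiers import SpecifierSet; print(SpecifierSet(">=2.1.0.dev0").contains("2.1.0.dev0"))'
    # -> True: adding the suffix to the pin makes the dev build acceptable
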
---
 dev/provider_packages/prepare_provider_packages.py | 21 ++++++++++++++++++---
 1 file changed, 18 insertions(+), 3 deletions(-)

diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 16bd046..e36fcf8 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -357,11 +357,12 @@ def get_long_description(provider_package_id: str) -> str:
     return long_description
 
 
-def get_install_requirements(provider_package_id: str) -> List[str]:
+def get_install_requirements(provider_package_id: str, version_suffix: str) -> List[str]:
     """
     Returns install requirements for the package.
 
     :param provider_package_id: id of the provider package
+    :param version_suffix: optional version suffix for packages
 
     :return: install requirements of the package
     """
@@ -369,7 +370,21 @@ def get_install_requirements(provider_package_id: str) -> List[str]:
     provider_yaml = get_provider_yaml(provider_package_id)
     install_requires = []
     if "additional-dependencies" in provider_yaml:
-        install_requires = provider_yaml['additional-dependencies']
+        additional_dependencies = provider_yaml['additional-dependencies']
+        if version_suffix:
+            # In case we are preparing "rc" or dev0 packages, we should also
+            # make sure that cross-dependency with Airflow or Airflow Providers will
+            # contain the version suffix, otherwise we will have conflicting dependencies.
+            # For example if (in sftp) we have ssh>=2.0.1 and release ssh==2.0.1
+            # we want to turn this into ssh>=2.0.1.dev0 if we build dev0 version of the packages
+            # or >=2.0.1rc1 if we build rc1 version of the packages.
+            for dependency in additional_dependencies:
+                if dependency.startswith("apache-airflow") and ">=" in dependency:
+                    dependency = dependency + version_suffix
+                install_requires.append(dependency)
+        else:
+            install_requires.extend(additional_dependencies)
+
     install_requires.extend(dependencies)
     return install_requires
 
@@ -1513,7 +1528,7 @@ def get_provider_jinja_context(
         "PROVIDERS_FOLDER": "providers",
         "PROVIDER_DESCRIPTION": provider_details.provider_description,
         "INSTALL_REQUIREMENTS": get_install_requirements(
-            provider_package_id=provider_details.provider_package_id
+            provider_package_id=provider_details.provider_package_id, version_suffix=version_suffix
         ),
         "SETUP_REQUIREMENTS": get_setup_requirements(),
         "EXTRAS_REQUIREMENTS": get_package_extras(provider_package_id=provider_details.provider_package_id),

[airflow] 28/29: Updated clean-logs.sh (#16978)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 5c7505fc101585437a1b411fc293f6b0f3915260
Author: Shraman Basyal <49...@users.noreply.github.com>
AuthorDate: Fri Jul 16 00:53:19 2021 -0500

    Updated clean-logs.sh (#16978)
    
    (cherry picked from commit d9221df74e4163a626bc05fc45d5e063a8c51d63)
---
 scripts/in_container/prod/clean-logs.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/scripts/in_container/prod/clean-logs.sh b/scripts/in_container/prod/clean-logs.sh
index c36554a..e05e4f2 100755
--- a/scripts/in_container/prod/clean-logs.sh
+++ b/scripts/in_container/prod/clean-logs.sh
@@ -19,12 +19,12 @@
 
 set -euo pipefail
 
-DIRECTORY="${AIRFLOW_HOME:-/usr/local/airflow}"
-RETENTION="${AIRFLOW__LOG_RETENTION_DAYS:-15}"
+readonly DIRECTORY="${AIRFLOW_HOME:-/usr/local/airflow}"
+readonly RETENTION="${AIRFLOW__LOG_RETENTION_DAYS:-15}"
 
 trap "exit" INT TERM
 
-EVERY=$((15*60))
+readonly EVERY=$((15*60))
 
 echo "Cleaning logs every $EVERY seconds"
 
@@ -33,5 +33,5 @@ while true; do
   find "${DIRECTORY}"/logs -mtime +"${RETENTION}" -name '*.log' -delete
 
   seconds=$(( $(date -u +%s) % EVERY))
-  [[ $seconds -lt 1 ]] || sleep $((EVERY - seconds))
+  (( seconds < 1 )) || sleep $((EVERY - seconds))
 done

[airflow] 04/29: Add --executor option to breeze kind-cluster deploy command (#15661)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 91b59e90c815fc4dfb2cd57f85d9515f893273e1
Author: Ephraim Anierobi <sp...@gmail.com>
AuthorDate: Wed May 5 05:41:40 2021 +0100

    Add --executor option to breeze kind-cluster deploy command (#15661)
    
    This change will enable us to easily deploy Airflow to a Kubernetes
    cluster and test it using different executors.
    Example usage:
       ./breeze kind-cluster --executor CeleryExecutor deploy
    
    (cherry picked from commit e47f7e42b632ad78a204531e385ec09bcce10816)
---
 scripts/ci/libraries/_kind.sh | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index 085dfac..d5adadf 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -140,6 +140,10 @@ function kind::perform_kind_cluster_operation() {
     echo "Kubernetes mode: ${KUBERNETES_MODE}"
     echo
 
+    echo
+    echo "Executor: ${EXECUTOR}"
+    echo
+
     if [[ ${operation} == "status" ]]; then
         if [[ ${all_clusters} == *"${KIND_CLUSTER_NAME}"* ]]; then
             echo

[airflow] 06/29: Change default airflow version in Dockerfile (#16714)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 11e73c51b8ddc43466c9e55fb1621760dec85a54
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Tue Jun 29 22:02:32 2021 +0200

    Change default airflow version in Dockerfile (#16714)
    
    (cherry picked from commit 648abc1cfda9ea33c44eb90d271add7baae86d3b)
---
 Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dockerfile b/Dockerfile
index 6a4b75d..8c32913 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -33,7 +33,7 @@
 #                        all the build essentials. This makes the image
 #                        much smaller.
 #
-ARG AIRFLOW_VERSION="2.0.2"
+ARG AIRFLOW_VERSION="2.2.0.dev0"
 ARG AIRFLOW_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
 ARG ADDITIONAL_AIRFLOW_EXTRAS=""
 ARG ADDITIONAL_PYTHON_DEPS=""

[airflow] 17/29: Pulls latest images to build images in "Build Image" flow (#16948)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 079acb1bfeb0cfd0fa3951d0406ee53b8d1ff953
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Jul 12 19:10:25 2021 +0200

    Pulls latest images to build images in "Build Image" flow (#16948)
    
    The recent switch to ghcr.io revealed a problem with a misconfigured
    GITHUB_REGISTRY_PULL_IMAGE_TAG variable for PROD images.

    It was trying to pull the build image with the COMMIT_SHA tag before it
    was built (this was previously hidden by the fallback of pulling the
    image from DockerHub, and was one of the reasons for slower PROD image
    builds).
    
    This PR should fix it.
    
    (cherry picked from commit 4d8fd37b6d110b348f29e677eaaaf9e447c837c7)
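
Roughly, the intended flow after the fix is to pull the most recent successfully pushed image ("latest") as a build cache and to push the freshly built one under the commit SHA; a hedged sketch with a placeholder image name (TARGET_COMMIT_SHA is assumed to be set by the workflow):

    CI_IMAGE="ghcr.io/apache/airflow-ci"              # placeholder, not the real image name
    docker pull "${CI_IMAGE}:latest" || true          # GITHUB_REGISTRY_PULL_IMAGE_TAG stays "latest"
    docker build . --cache-from "${CI_IMAGE}:latest" --tag "${CI_IMAGE}:${TARGET_COMMIT_SHA}"
    docker push "${CI_IMAGE}:${TARGET_COMMIT_SHA}"    # GITHUB_REGISTRY_PUSH_IMAGE_TAG=<commit sha>
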
---
 .github/workflows/build-images.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 34aa82e..363455c 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -231,7 +231,6 @@ jobs:
       - name: Set envs
         run: |
           echo "GITHUB_REGISTRY_PUSH_IMAGE_TAG=${TARGET_COMMIT_SHA}" >> "$GITHUB_ENV"
-          echo "GITHUB_REGISTRY_PULL_IMAGE_TAG=${TARGET_COMMIT_SHA}" >> "$GITHUB_ENV"
       - uses: actions/checkout@v2
         with:
           ref: ${{ env.TARGET_COMMIT_SHA }}

[airflow] 21/29: Move CI-integration images to ghcr.io (#16797)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e10acaa38854b1b4896059e314cd0ce63f932159
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Jul 14 07:39:00 2021 +0200

    Move CI-integration images to ghcr.io (#16797)
    
    This is the final step of moving the images used for CI integration
    from DockerHub to `ghcr.io`. With publicly available images and
    self-management provided by GitHub, we can finally keep the images
    "properly" - i.e. each image is separate and its tag is only the
    image version.
    
    Part of #16555
    
    (cherry picked from commit 3143f1af44f96088eb417f34cc111d7361b4c6d8)
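
In practice the scheme changes from one DockerHub repository with composite tags to one ghcr.io image per tool, with the tag carrying only the tool version and refresh date; for example (tags taken from the diffs below and possibly superseded since):

    # old: single DockerHub repository, everything encoded in the tag
    #   apache/airflow-ci:trino-2021.04.28
    # new: one ghcr.io package per image, tag = <tool version>-<refresh date>
    docker pull ghcr.io/apache/airflow-trino:359-2021.07.04
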
---
 BREEZE.rst                                               |  2 +-
 CI.rst                                                   |  6 +++---
 breeze                                                   |  2 +-
 kubernetes_tests/test_kubernetes_pod_operator.py         |  2 +-
 scripts/ci/docker-compose/integration-kerberos.yml       |  2 +-
 scripts/ci/docker-compose/integration-openldap.yml       |  2 +-
 scripts/ci/docker-compose/integration-trino.yml          |  2 +-
 scripts/ci/dockerfiles/apache-rat/build_and_push.sh      | 11 +++++------
 scripts/ci/dockerfiles/bats/build_and_push.sh            | 11 +++++------
 scripts/ci/dockerfiles/krb5-kdc-server/build_and_push.sh | 11 +++++------
 scripts/ci/dockerfiles/stress/build_and_push.sh          | 12 ++++++------
 scripts/ci/dockerfiles/trino/Dockerfile                  |  2 +-
 scripts/ci/dockerfiles/trino/build_and_push.sh           | 13 ++++++-------
 scripts/ci/static_checks/bats_tests.sh                   |  7 +++----
 scripts/ci/static_checks/check_license.sh                |  2 +-
 tests/kubernetes/pod.yaml                                |  2 +-
 tests/kubernetes/test_pod_generator.py                   |  4 ++--
 17 files changed, 44 insertions(+), 49 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index 7e6bedd..90d3f0b 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1199,7 +1199,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
         you want to pull the image with specific COMMIT_SHA tag.
 
         'breeze shell \
-              -- github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
+              --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
         'breeze \
               --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
 
diff --git a/CI.rst b/CI.rst
index d0eec1b..fedd300 100644
--- a/CI.rst
+++ b/CI.rst
@@ -78,7 +78,7 @@ this image can be built only once and used by all the jobs running tests. The im
 rather than build it from the scratch. Pulling such image takes ~ 1 minute, thanks to that we are saving
 a lot of precious time for jobs.
 
-We use `GitHub Container Registry <https://docs.github.com/en/packages/guides/about-github-container-registry>`_
+We use `GitHub Container Registry <https://docs.github.com/en/packages/guides/about-github-container-registry>`_.
 ``GITHUB_TOKEN`` is needed to push to the registry and we configured scopes of the tokens in our jobs
 to be able to write to the registry.
 
@@ -411,9 +411,9 @@ The following components are part of the CI infrastructure
 * **GitHub Actions** -  (GA) UI + execution engine for our jobs
 * **GA CRON trigger** - GitHub Actions CRON triggering our jobs
 * **GA Workers** - virtual machines running our jobs at GitHub Actions (max 20 in parallel)
-* **GitHub Image Registry**- image registry used as build cache for CI  jobs.
+* **GitHub Image Registry** - image registry used as build cache for CI jobs.
   It is at https://ghcr.io/apache/airflow
-* **DockerHub Image Registry**- image registry used to pull base Python images and (manually) publish
+* **DockerHub Image Registry** - image registry used to pull base Python images and (manually) publish
   the released Production Airflow images. It is at https://dockerhub.com/apache/airflow
 * **Official Images** (future) - these are official images that are prominently visible in DockerHub.
   We aim our images to become official images so that you will be able to pull them
diff --git a/breeze b/breeze
index 3b09208..a6454c5 100755
--- a/breeze
+++ b/breeze
@@ -1665,7 +1665,7 @@ ${CMDNAME} shell [FLAGS] [-- <EXTRA_ARGS>]
       you want to pull the image with specific COMMIT_SHA tag.
 
       '${CMDNAME} shell \\
-            -- github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
+            --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
       '${CMDNAME} \\
             --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
 
diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py
index 9d32782..63b3fb6 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator.py
@@ -869,7 +869,7 @@ class TestKubernetesPodOperatorSystem(unittest.TestCase):
                         'command': ['stress'],
                         'env': [],
                         'envFrom': [],
-                        'image': 'apache/airflow-ci:stress-2021.04.28-1.0.4',
+                        'image': 'ghcr.io/apache/airflow-stress:1.0.4-2021.07.04',
                         'imagePullPolicy': 'IfNotPresent',
                         'name': 'base',
                         'ports': [],
diff --git a/scripts/ci/docker-compose/integration-kerberos.yml b/scripts/ci/docker-compose/integration-kerberos.yml
index b9ca4cc..0e79c6a 100644
--- a/scripts/ci/docker-compose/integration-kerberos.yml
+++ b/scripts/ci/docker-compose/integration-kerberos.yml
@@ -18,7 +18,7 @@
 version: "2.2"
 services:
   kdc-server-example-com:
-    image: apache/airflow-ci:krb5-kdc-server-2021.04.28
+    image: ghcr.io/apache/airflow-krb5-kdc-server:2021.07.04
     hostname: krb5-kdc-server-example-com
     domainname: example.com
     networks:
diff --git a/scripts/ci/docker-compose/integration-openldap.yml b/scripts/ci/docker-compose/integration-openldap.yml
index 60ff58a..eea3246 100644
--- a/scripts/ci/docker-compose/integration-openldap.yml
+++ b/scripts/ci/docker-compose/integration-openldap.yml
@@ -18,7 +18,7 @@
 version: "2.2"
 services:
   openldap:
-    image: apache/airflow-ci:openldap-2020.07.10-2.4.50
+    image: ghcr.io/apache/airflow-openldap:2.4.50-2021.07.04
     command: "--copy-service"
     environment:
       - LDAP_DOMAIN=example.com
diff --git a/scripts/ci/docker-compose/integration-trino.yml b/scripts/ci/docker-compose/integration-trino.yml
index 822a2ff..e381ced 100644
--- a/scripts/ci/docker-compose/integration-trino.yml
+++ b/scripts/ci/docker-compose/integration-trino.yml
@@ -18,7 +18,7 @@
 version: "2.2"
 services:
   trino:
-    image: apache/airflow-ci:trino-2021.04.28
+    image: ghcr.io/apache/airflow-trino:359-2021.07.04
     container_name: trino
     hostname: trino
     domainname: example.com
diff --git a/scripts/ci/dockerfiles/apache-rat/build_and_push.sh b/scripts/ci/dockerfiles/apache-rat/build_and_push.sh
index ceed534..228b301 100755
--- a/scripts/ci/dockerfiles/apache-rat/build_and_push.sh
+++ b/scripts/ci/dockerfiles/apache-rat/build_and_push.sh
@@ -16,15 +16,13 @@
 # specific language governing permissions and limitations
 # under the License.
 set -euo pipefail
-DOCKERHUB_USER=${DOCKERHUB_USER:="apache"}
-DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow-ci"}
-readonly DOCKERHUB_USER
-readonly DOCKERHUB_REPO
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:="apache/airflow"}
+readonly GITHUB_REPOSITORY
 
 APACHERAT_VERSION="0.13"
 readonly APACHERAT_VERSION
 
-AIRFLOW_APACHERAT_VERSION="2021.04.28"
+AIRFLOW_APACHERAT_VERSION="2021.07.04"
 readonly AIRFLOW_APACHERAT_VERSION
 
 COMMIT_SHA=$(git rev-parse HEAD)
@@ -32,7 +30,7 @@ readonly COMMIT_SHA
 
 cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1
 
-TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:apache-rat-${AIRFLOW_APACHERAT_VERSION}-${APACHERAT_VERSION}"
+TAG="ghcr.io/${GITHUB_REPOSITORY}-apache-rat:${APACHERAT_VERSION}-${AIRFLOW_APACHERAT_VERSION}"
 readonly TAG
 
 docker build . \
@@ -40,6 +38,7 @@ docker build . \
     --build-arg "APACHERAT_VERSION=${APACHERAT_VERSION}" \
     --build-arg "AIRFLOW_APACHERAT_VERSION=${AIRFLOW_APACHERAT_VERSION}" \
     --build-arg "COMMIT_SHA=${COMMIT_SHA}" \
+    --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
     --tag "${TAG}"
 
 docker push "${TAG}"
diff --git a/scripts/ci/dockerfiles/bats/build_and_push.sh b/scripts/ci/dockerfiles/bats/build_and_push.sh
index 1f35c9a..20b6c02 100755
--- a/scripts/ci/dockerfiles/bats/build_and_push.sh
+++ b/scripts/ci/dockerfiles/bats/build_and_push.sh
@@ -16,10 +16,8 @@
 # specific language governing permissions and limitations
 # under the License.
 set -euo pipefail
-DOCKERHUB_USER=${DOCKERHUB_USER:="apache"}
-DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow-ci"}
-readonly DOCKERHUB_USER
-readonly DOCKERHUB_REPO
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:="apache/airflow"}
+readonly GITHUB_REPOSITORY
 
 BATS_VERSION="1.2.1"
 BATS_ASSERT_VERSION="2.0.0"
@@ -30,7 +28,7 @@ readonly BATS_ASSERT_VERSION
 readonly BATS_SUPPORT_VERSION
 readonly BATS_FILE_VERSION
 
-AIRFLOW_BATS_VERSION="2021.04.28"
+AIRFLOW_BATS_VERSION="2021.07.04"
 readonly AIRFLOW_BATS_VERSION
 
 COMMIT_SHA=$(git rev-parse HEAD)
@@ -38,7 +36,7 @@ readonly COMMIT_SHA
 
 cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1
 
-TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:bats-${AIRFLOW_BATS_VERSION}-${BATS_VERSION}"
+TAG="ghcr.io/${GITHUB_REPOSITORY}-bats:${BATS_VERSION}-${AIRFLOW_BATS_VERSION}"
 readonly TAG
 
 docker build . \
@@ -48,6 +46,7 @@ docker build . \
     --build-arg "BATS_FILE_VERSION=${BATS_FILE_VERSION}" \
     --build-arg "BATS_ASSERT_VERSION=${BATS_ASSERT_VERSION}" \
     --build-arg "COMMIT_SHA=${COMMIT_SHA}" \
+    --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
     --tag "${TAG}"
 
 docker push "${TAG}"
diff --git a/scripts/ci/dockerfiles/krb5-kdc-server/build_and_push.sh b/scripts/ci/dockerfiles/krb5-kdc-server/build_and_push.sh
index e1a242a..6ad4354 100755
--- a/scripts/ci/dockerfiles/krb5-kdc-server/build_and_push.sh
+++ b/scripts/ci/dockerfiles/krb5-kdc-server/build_and_push.sh
@@ -16,12 +16,10 @@
 # specific language governing permissions and limitations
 # under the License.
 set -euo pipefail
-DOCKERHUB_USER=${DOCKERHUB_USER:="apache"}
-DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow-ci"}
-readonly DOCKERHUB_USER
-readonly DOCKERHUB_REPO
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:="apache/airflow"}
+readonly GITHUB_REPOSITORY
 
-AIRFLOW_KRB5KDCSERVER_VERSION="2021.04.28"
+AIRFLOW_KRB5KDCSERVER_VERSION="2021.07.04"
 readonly AIRFLOW_KRB5KDCSERVER_VERSION
 
 COMMIT_SHA=$(git rev-parse HEAD)
@@ -29,13 +27,14 @@ readonly COMMIT_SHA
 
 cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1
 
-TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:krb5-kdc-server-${AIRFLOW_KRB5KDCSERVER_VERSION}"
+TAG="ghcr.io/${GITHUB_REPOSITORY}-krb5-kdc-server:${AIRFLOW_KRB5KDCSERVER_VERSION}"
 readonly TAG
 
 docker build . \
     --pull \
     --build-arg "AIRFLOW_KRB5KDCSERVER_VERSION=${AIRFLOW_KRB5KDCSERVER_VERSION}" \
     --build-arg "COMMIT_SHA=${COMMIT_SHA}" \
+    --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
     --tag "${TAG}"
 
 docker push "${TAG}"
diff --git a/scripts/ci/dockerfiles/stress/build_and_push.sh b/scripts/ci/dockerfiles/stress/build_and_push.sh
index c158456..ea3144d 100755
--- a/scripts/ci/dockerfiles/stress/build_and_push.sh
+++ b/scripts/ci/dockerfiles/stress/build_and_push.sh
@@ -16,14 +16,13 @@
 # specific language governing permissions and limitations
 # under the License.
 set -euo pipefail
-DOCKERHUB_USER=${DOCKERHUB_USER:="apache"}
-DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow-ci"}
-readonly DOCKERHUB_USER
-readonly DOCKERHUB_REPO
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:="apache/airflow"}
+readonly GITHUB_REPOSITORY
+
 STRESS_VERSION="1.0.4"
 readonly STRESS_VERSION
 
-AIRFLOW_STRESS_VERSION="2021.04.28"
+AIRFLOW_STRESS_VERSION="2021.07.04"
 readonly AIRFLOW_STRESS_VERSION
 
 COMMIT_SHA=$(git rev-parse HEAD)
@@ -31,7 +30,7 @@ readonly COMMIT_SHA
 
 cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1
 
-TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:stress-${AIRFLOW_STRESS_VERSION}-${STRESS_VERSION}"
+TAG="ghcr.io/${GITHUB_REPOSITORY}-stress:${STRESS_VERSION}-${AIRFLOW_STRESS_VERSION}"
 readonly TAG
 
 docker build . \
@@ -39,6 +38,7 @@ docker build . \
     --build-arg "STRESS_VERSION=${STRESS_VERSION}" \
     --build-arg "AIRFLOW_STRESS_VERSION=${AIRFLOW_STRESS_VERSION}" \
     --build-arg "COMMIT_SHA=${COMMIT_SHA}" \
+    --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
     --tag "${TAG}"
 
 docker push "${TAG}"
diff --git a/scripts/ci/dockerfiles/trino/Dockerfile b/scripts/ci/dockerfiles/trino/Dockerfile
index 32ef0fd..a36a8cc 100644
--- a/scripts/ci/dockerfiles/trino/Dockerfile
+++ b/scripts/ci/dockerfiles/trino/Dockerfile
@@ -14,7 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-ARG TRINO_VERSION="354"
+ARG TRINO_VERSION="359"
 FROM trinodb/trino:${TRINO_VERSION}
 
 # Obtain root privileges
diff --git a/scripts/ci/dockerfiles/trino/build_and_push.sh b/scripts/ci/dockerfiles/trino/build_and_push.sh
index c5ebbc7c4..3f876f7 100755
--- a/scripts/ci/dockerfiles/trino/build_and_push.sh
+++ b/scripts/ci/dockerfiles/trino/build_and_push.sh
@@ -16,15 +16,13 @@
 # specific language governing permissions and limitations
 # under the License.
 set -euo pipefail
-DOCKERHUB_USER=${DOCKERHUB_USER:="apache"}
-DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow-ci"}
-readonly DOCKERHUB_USER
-readonly DOCKERHUB_REPO
+GITHUB_REPOSITORY=${GITHUB_REPOSITORY:="apache/airflow"}
+readonly GITHUB_REPOSITORY
 
-TRINO_VERSION="354"
+TRINO_VERSION="359"
 readonly TRINO_VERSION
 
-AIRFLOW_TRINO_VERSION="2021.04.28"
+AIRFLOW_TRINO_VERSION="2021.07.04"
 readonly AIRFLOW_TRINO_VERSION
 
 COMMIT_SHA=$(git rev-parse HEAD)
@@ -32,7 +30,7 @@ readonly COMMIT_SHA
 
 cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1
 
-TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:trino-${AIRFLOW_TRINO_VERSION}"
+TAG="ghcr.io/${GITHUB_REPOSITORY}-trino:${TRINO_VERSION}-${AIRFLOW_TRINO_VERSION}"
 readonly TAG
 
 docker build . \
@@ -40,6 +38,7 @@ docker build . \
     --build-arg "TRINO_VERSION=${TRINO_VERSION}" \
     --build-arg "AIRFLOW_TRINO_VERSION=${AIRFLOW_TRINO_VERSION}" \
     --build-arg "COMMIT_SHA=${COMMIT_SHA}" \
+    --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \
     --tag "${TAG}"
 
 docker push "${TAG}"
diff --git a/scripts/ci/static_checks/bats_tests.sh b/scripts/ci/static_checks/bats_tests.sh
index 6130ad5..55a62f3 100755
--- a/scripts/ci/static_checks/bats_tests.sh
+++ b/scripts/ci/static_checks/bats_tests.sh
@@ -50,19 +50,18 @@ function run_bats_tests() {
         fi
 
     done
+    local airflow_bats_image="ghcr.io/apache/airflow-bats:1.2.1-2021.07.04"
     # deduplicate
     FS=" " read -r -a bats_arguments <<< "$(tr ' ' '\n' <<< "${bats_arguments[@]}" | sort -u | tr '\n' ' ' )"
     if [[ ${#@} == "0" ]]; then
         # Run all tests
-        docker run --workdir /airflow -v "$(pwd):/airflow" --rm \
-            apache/airflow-ci:bats-2021.04.28-1.2.1 --tap /airflow/tests/bats/
+        docker run --workdir /airflow -v "$(pwd):/airflow" --rm "${airflow_bats_image}" --tap /airflow/tests/bats/
     elif [[ ${#bats_arguments} == "0" ]]; then
         # Skip running anything if all filtered out
         true
     else
         # Run selected tests
-        docker run --workdir /airflow -v "$(pwd):/airflow" --rm \
-            apache/airflow-ci:bats-2021.04.28-1.2.1 --tap "${bats_arguments[@]}"
+        docker run --workdir /airflow -v "$(pwd):/airflow" --rm "${airflow_bats_image}" --tap "${bats_arguments[@]}"
     fi
 }
 
diff --git a/scripts/ci/static_checks/check_license.sh b/scripts/ci/static_checks/check_license.sh
index ce6e6fa..ef365de 100755
--- a/scripts/ci/static_checks/check_license.sh
+++ b/scripts/ci/static_checks/check_license.sh
@@ -34,7 +34,7 @@ function run_check_license() {
     if ! docker_v run -v "${AIRFLOW_SOURCES}:/opt/airflow" -t \
             --user "$(id -ur):$(id -gr)" \
             --rm --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
-            apache/airflow-ci:apache-rat-2021.04.28-0.13 \
+            ghcr.io/apache/airflow-apache-rat:0.13-2021.07.04 \
             --exclude-file /opt/airflow/.rat-excludes \
             --d /opt/airflow | tee "${AIRFLOW_SOURCES}/logs/rat-results.txt" ; then
         echo
diff --git a/tests/kubernetes/pod.yaml b/tests/kubernetes/pod.yaml
index 5e02880..b698444 100644
--- a/tests/kubernetes/pod.yaml
+++ b/tests/kubernetes/pod.yaml
@@ -23,7 +23,7 @@ metadata:
 spec:
   containers:
     - name: memory-demo-ctr
-      image: apache/airflow-ci:stress-2021.04.28-1.0.4
+      image: ghcr.io/apache/airflow-stress:1.0.4-2021.07.04
       resources:
         limits:
           memory: "200Mi"
diff --git a/tests/kubernetes/test_pod_generator.py b/tests/kubernetes/test_pod_generator.py
index 3ca60c1..b288075 100644
--- a/tests/kubernetes/test_pod_generator.py
+++ b/tests/kubernetes/test_pod_generator.py
@@ -50,7 +50,7 @@ class TestPodGenerator(unittest.TestCase):
                     {
                         'args': ['--vm', '1', '--vm-bytes', '150M', '--vm-hang', '1'],
                         'command': ['stress'],
-                        'image': 'apache/airflow-ci:stress-2021.04.28-1.0.4',
+                        'image': 'ghcr.io/apache/airflow-stress:1.0.4-2021.07.04',
                         'name': 'memory-demo-ctr',
                         'resources': {'limits': {'memory': '200Mi'}, 'requests': {'memory': '100Mi'}},
                     }
@@ -688,7 +688,7 @@ metadata:
 spec:
   containers:
     - name: memory-demo-ctr
-      image: apache/airflow-ci:stress-2021.04.28-1.0.4
+      image: ghcr.io/apache/airflow-stress:1.0.4-2021.07.04
       resources:
         limits:
           memory: "200Mi"

[airflow] 26/29: Dev: Bump stale action to v4 (#17025)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4047c78d2d5fffe306463a6865725350625f32bf
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Thu Jul 15 14:22:53 2021 +0100

    Dev: Bump stale action to v4 (#17025)
    
    https://github.com/actions/stale/releases/tag/v4.0.0 -- Looking forward to colored logs
    (cherry picked from commit 7f9cdc9164363813e8027f639c7baca2e5641199)
---
 .github/workflows/stale.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index d1e8956..a1bdd2b 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -29,7 +29,7 @@ jobs:
   stale:
     runs-on: ubuntu-20.04
     steps:
-      - uses: actions/stale@v3
+      - uses: actions/stale@v4
         with:
           stale-pr-message: >
             This pull request has been automatically marked as stale because it has not had

[airflow] 01/29: Fix permissions for CodeQL workflows (#16660)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 8c336c143d547776dca600453b1da58a318d11d6
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Fri Jun 25 19:35:37 2021 +0200

    Fix permissions for CodeQL workflows (#16660)
    
    After limiting permissions, our CodeQL workflow started failing.
    
    This is because it needs some extra permissions, as explained in
    https://github.com/github/codeql-action/issues/464
    
    This PR adds the required permissions.
    
    (cherry picked from commit b8a9e9fba6737500bdcce920028ece87a31ab129)
---
 .github/workflows/codeql-analysis.yml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 096177a..6d6f4d0 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -67,7 +67,11 @@ jobs:
         # Override automatic language detection by changing the below list
         # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
         language: ['python', 'javascript']
-
+    permissions:
+      actions: read
+      contents: read
+      pull-requests: read
+      security-events: write
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2

[airflow] 22/29: Errors out instead of trying to workaround buggy docker-compose v2 (#16989)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c450b662a66f371f8b90a9bfedc8e2baf8b555a1
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Jul 14 13:33:14 2021 +0200

    Errors out instead of trying to workaround buggy docker-compose v2 (#16989)
    
    Docker-Compose v2 Beta has an error in processing the environment
    variable file which prevents Breeze from running. Until it is
    fixed, we print an error, explain how to disable v2, and exit -
    because the workaround introduces more problems than it solves
    (passing environment variables to the container is partially
    broken).
    
    Also see https://github.com/docker/compose-cli/issues/1917
    
    (cherry picked from commit 97ae0f2bf36033a69e6221b569d063f856491571)
---
 breeze                                        | 16 ++++++
 scripts/ci/docker-compose/_docker_compose.env | 70 ---------------------------
 scripts/ci/docker-compose/base.yml            |  2 +-
 3 files changed, 17 insertions(+), 71 deletions(-)

diff --git a/breeze b/breeze
index a6454c5..d048bd8 100755
--- a/breeze
+++ b/breeze
@@ -590,6 +590,22 @@ function breeze::prepare_command_file() {
     local compose_file="${3}"
     cat <<EOF >"${file}"
 #!/usr/bin/env bash
+docker_compose_version=\$(docker-compose --version)
+if [[ \${docker_compose_version} =~ .*version\ 2.* ]]; then
+  echo
+  echo "${COLOR_RED}Docker Compose Beta version 2has bug that prevents breeze from running.${COLOR_RESET}"
+  echo "${COLOR_RED}You have: \${docker_compose_version}.${COLOR_RESET}"
+  echo
+  echo "${COLOR_YELLOW}Please switch to stable version via Docker Desktop -> Experimental or by running:${COLOR_RESET}"
+  echo
+  echo "${COLOR_CYAN}docker-compose disable-v2${COLOR_RESET}"
+  echo
+  echo "${COLOR_YELLOW}Also please upvote https://github.com/docker/compose-cli/issues/1917${COLOR_RESET}"
+  echo
+  echo "${COLOR_RED}Exiting until you disable v2 version.${COLOR_RESET}"
+  exit 1
+fi
+
 if [[ \${VERBOSE} == "true" ]]; then
   echo
   echo "Executing script:"
diff --git a/scripts/ci/docker-compose/_docker_compose.env b/scripts/ci/docker-compose/_docker_compose.env
deleted file mode 100644
index b842270..0000000
--- a/scripts/ci/docker-compose/_docker_compose.env
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# Until https://github.com/docker/compose-cli/issues/1917 is resolved
-# we need to keep this docker_compose.env separately to accoun for different
-# behaviour of Docker-Compose V2 Beta (which became default for MacOS users updating
-# their Docker Desktop to a newer version
-#
-AIRFLOW_CI_IMAGE=
-AIRFLOW_EXTRAS=
-BACKEND=
-BREEZE=
-CI=
-CI_BUILD_ID=
-CI_JOB_ID=
-CI_EVENT_TYPE=
-CI_TARGET_REPO=
-CI_TARGET_BRANCH=
-COMMIT_SHA=
-DB_RESET=
-DEFAULT_CONSTRAINTS_BRANCH=
-ENABLED_INTEGRATIONS=
-ENABLED_SYSTEMS=
-GITHUB_ACTIONS=
-GITHUB_REGISTRY_PULL_IMAGE_TAG=
-HOST_USER_ID=
-HOST_GROUP_ID=
-HOST_OS=
-HOST_HOME=
-INIT_SCRIPT_FILE=
-INSTALL_AIRFLOW_VERSION=
-GENERATE_CONSTRAINTS_MODE=
-INSTALL_PROVIDERS_FROM_SOURCES=
-USE_AIRFLOW_VERSION=
-USE_PACKAGES_FROM_DIST=
-ISSUE_ID=
-LOAD_DEFAULT_CONNECTIONS=
-LOAD_EXAMPLES=
-MYSQL_VERSION=
-NUM_RUNS=
-PACKAGE_FORMAT=
-POSTGRES_VERSION=
-PRINT_INFO_FROM_SCRIPTS=
-PYTHONDONTWRITEBYTECODE=
-PYTHON_MAJOR_MINOR_VERSION=
-RUN_TESTS=
-RUN_INTEGRATION_TESTS=
-RUN_SYSTEM_TESTS=
-START_AIRFLOW=
-TEST_TYPE=
-UPGRADE_TO_NEWER_DEPENDENCIES=
-VERBOSE=
-VERBOSE_COMMANDS=
-VERSION_SUFFIX_FOR_PYPI=
-VERSION_SUFFIX_FOR_SVN=
-WHEEL_VERSION=
diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index a9ae734..34cc100 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -26,7 +26,7 @@ services:
       - KUBECONFIG=/files/.kube/config
       - HOST_HOME=${HOME}
     env_file:
-      - _docker_compose.env
+      - _docker.env
     volumes:
       # Pass docker to inside of the container so that Kind and Moto tests can use it.
       - /var/run/docker.sock:/var/run/docker.sock
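
The guard added to ``breeze`` above boils down to a version sniff on the ``docker-compose --version`` output. A standalone sketch of the same check, with the Breeze colouring and messaging stripped down:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Refuse to continue when docker-compose reports a v2 (beta) version string
docker_compose_version=$(docker-compose --version)
if [[ ${docker_compose_version} =~ .*version\ 2.* ]]; then
    echo "Docker Compose v2 detected: ${docker_compose_version}"
    echo "Run 'docker-compose disable-v2' (or switch it off in Docker Desktop) and retry."
    exit 1
fi
echo "docker-compose v1 detected - continuing"
```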

[airflow] 23/29: Fix release guide when copying artifacts (#17001)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 54d2ae9ccce6adfa7a4c4fbdbdbc30d8bf6d1001
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Wed Jul 14 21:21:23 2021 +0100

    Fix release guide when copying artifacts (#17001)
    
    When copying artifacts from dev svn repo to release repo:
    
    Before:
    
    ```
    ❯ for f in ${AIRFLOW_DEV_SVN}/$RC/*; do
    echo "${$(basename $f)/rc?/}"
    done
    apache-airflow-2.1.2-sou.tar.gz
    apache-airflow-2.1.2-sou.tar.gz.asc
    apache-airflow-2.1.2-sou.tar.gz.sha512
    apache-airflow-2.1.2.tar.gz
    apache-airflow-2.1.2.tar.gz.asc
    apache-airflow-2.1.2.tar.gz.sha512
    apache_airflow-2.1.2-py3-none-any.whl
    apache_airflow-2.1.2-py3-none-any.whl.asc
    apache_airflow-2.1.2-py3-none-any.whl.sha512
    ```
    
    After:
    
    ```
    ❯ for f in ${AIRFLOW_DEV_SVN}/$RC/*; do
    echo "${$(basename $f)/}"
    done
    apache-airflow-2.1.2-source.tar.gz
    apache-airflow-2.1.2-source.tar.gz.asc
    apache-airflow-2.1.2-source.tar.gz.sha512
    apache-airflow-2.1.2.tar.gz
    apache-airflow-2.1.2.tar.gz.asc
    apache-airflow-2.1.2.tar.gz.sha512
    apache_airflow-2.1.2-py3-none-any.whl
    apache_airflow-2.1.2-py3-none-any.whl.asc
    apache_airflow-2.1.2-py3-none-any.whl.sha512
    ```
    
    (cherry picked from commit ded4beb249a411456320381e62f1929dbfdd79f3)
---
 dev/README_RELEASE_AIRFLOW.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index 6068508..247835a 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -555,9 +555,9 @@ cd "${VERSION}"
 
 # Move the artifacts to svn folder & commit
 for f in ${AIRFLOW_DEV_SVN}/$RC/*; do
-    svn cp "$f" "${$(basename $f)/rc?/}"
+    svn cp "$f" "${$(basename $f)/}"
     # Those will be used to upload to PyPI
-    cp "$f" "${AIRFLOW_SOURCES}/dist/${$(basename $f)/rc?/}"
+    cp "$f" "${AIRFLOW_SOURCES}/dist/${$(basename $f)/}"
 done
 svn commit -m "Release Airflow ${VERSION} from ${RC}"
 

[airflow] 18/29: Fixed parsing issue of _docker.env file for docker-compose v2 (#16950)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4781a59b85604505d9be6b80bb399bfccc509fe5
Author: Oleksandr Yarushevskyi <33...@users.noreply.github.com>
AuthorDate: Mon Jul 12 22:38:12 2021 +0300

    Fixed parsing issue of _docker.env file for docker-compose v2 (#16950)
    
    (cherry picked from commit 83cb237031dfe5b7cb5238cc1409ce71fd9507b7)
---
 scripts/ci/docker-compose/_docker.env | 96 +++++++++++++++++------------------
 1 file changed, 48 insertions(+), 48 deletions(-)

diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env
index 6e8a5c5..c88300a 100644
--- a/scripts/ci/docker-compose/_docker.env
+++ b/scripts/ci/docker-compose/_docker.env
@@ -14,51 +14,51 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-AIRFLOW_CI_IMAGE
-AIRFLOW_EXTRAS
-BACKEND
-BREEZE
-CI
-CI_BUILD_ID
-CI_JOB_ID
-CI_EVENT_TYPE
-CI_TARGET_REPO
-CI_TARGET_BRANCH
-COMMIT_SHA
-DB_RESET
-DEFAULT_CONSTRAINTS_BRANCH
-ENABLED_INTEGRATIONS
-ENABLED_SYSTEMS
-GITHUB_ACTIONS
-GITHUB_REGISTRY_PULL_IMAGE_TAG
-HOST_USER_ID
-HOST_GROUP_ID
-HOST_OS
-HOST_HOME
-INIT_SCRIPT_FILE
-INSTALL_AIRFLOW_VERSION
-GENERATE_CONSTRAINTS_MODE
-INSTALL_PROVIDERS_FROM_SOURCES
-USE_AIRFLOW_VERSION
-USE_PACKAGES_FROM_DIST
-ISSUE_ID
-LOAD_DEFAULT_CONNECTIONS
-LOAD_EXAMPLES
-MYSQL_VERSION
-NUM_RUNS
-PACKAGE_FORMAT
-POSTGRES_VERSION
-PRINT_INFO_FROM_SCRIPTS
-PYTHONDONTWRITEBYTECODE
-PYTHON_MAJOR_MINOR_VERSION
-RUN_TESTS
-RUN_INTEGRATION_TESTS
-RUN_SYSTEM_TESTS
-START_AIRFLOW
-TEST_TYPE
-UPGRADE_TO_NEWER_DEPENDENCIES
-VERBOSE
-VERBOSE_COMMANDS
-VERSION_SUFFIX_FOR_PYPI
-VERSION_SUFFIX_FOR_SVN
-WHEEL_VERSION
+AIRFLOW_CI_IMAGE=
+AIRFLOW_EXTRAS=
+BACKEND=
+BREEZE=
+CI=
+CI_BUILD_ID=
+CI_JOB_ID=
+CI_EVENT_TYPE=
+CI_TARGET_REPO=
+CI_TARGET_BRANCH=
+COMMIT_SHA=
+DB_RESET=
+DEFAULT_CONSTRAINTS_BRANCH=
+ENABLED_INTEGRATIONS=
+ENABLED_SYSTEMS=
+GITHUB_ACTIONS=
+GITHUB_REGISTRY_PULL_IMAGE_TAG=
+HOST_USER_ID=
+HOST_GROUP_ID=
+HOST_OS=
+HOST_HOME=
+INIT_SCRIPT_FILE=
+INSTALL_AIRFLOW_VERSION=
+GENERATE_CONSTRAINTS_MODE=
+INSTALL_PROVIDERS_FROM_SOURCES=
+USE_AIRFLOW_VERSION=
+USE_PACKAGES_FROM_DIST=
+ISSUE_ID=
+LOAD_DEFAULT_CONNECTIONS=
+LOAD_EXAMPLES=
+MYSQL_VERSION=
+NUM_RUNS=
+PACKAGE_FORMAT=
+POSTGRES_VERSION=
+PRINT_INFO_FROM_SCRIPTS=
+PYTHONDONTWRITEBYTECODE=
+PYTHON_MAJOR_MINOR_VERSION=
+RUN_TESTS=
+RUN_INTEGRATION_TESTS=
+RUN_SYSTEM_TESTS=
+START_AIRFLOW=
+TEST_TYPE=
+UPGRADE_TO_NEWER_DEPENDENCIES=
+VERBOSE=
+VERBOSE_COMMANDS=
+VERSION_SUFFIX_FOR_PYPI=
+VERSION_SUFFIX_FOR_SVN=
+WHEEL_VERSION=

[airflow] 14/29: Avoid verification of images multiple times (#16928)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 7c1ce3a83c15ea58b4318a9c963f853ffb937683
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sun Jul 11 13:27:29 2021 +0200

    Avoid verification of images multiple times (#16928)
    
    The CI and PROD images are verified in the CI build, which takes
    a minute or so; however, they were being verified multiple times.

    Some time ago verification was added to run when the "wait for images"
    job was run, but the same script has also been run in every test. This is
    not needed because it is the exact same image downloaded from the
    GitHub registry (identified by commit hash), so the extra
    verifications are redundant. This will speed up the builds by
    a few percent.
    
    (cherry picked from commit 7354d38cba9fbba3c22981012633d86a48941f92)
---
 .github/workflows/ci.yml                               | 17 ++++++++++++-----
 scripts/ci/images/ci_wait_for_and_verify_ci_image.sh   | 10 ++++------
 scripts/ci/images/ci_wait_for_and_verify_prod_image.sh |  4 +++-
 3 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6763886..609b1c4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -284,16 +284,16 @@ jobs:
           Wait for CI images
           ${{ needs.build-info.outputs.pythonVersions }}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}
         id: wait-for-images
-        env:
-          CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
-            ${{needs.build-info.outputs.pythonVersionsListAsString}}
         # We wait for the images to be available either from the build-ci-image step or from
         # "build-images-workflow-run.yml' run as pull_request_target.
         # We are utilising single job to wait for all images because this job merely waits
         # for the images to be available.
         # The test jobs wait for it to complete if WAIT_FOR_IMAGE is 'true'!
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
-
+        env:
+          CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
+            ${{needs.build-info.outputs.pythonVersionsListAsString}}
+          VERIFY_IMAGE: "true"
 
   static-checks:
     timeout-minutes: 30
@@ -889,10 +889,11 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         # For the images to be available. The test jobs wait for it to complete!
         #
         id: wait-for-images
+        run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
         env:
           CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
             ${{needs.build-info.outputs.pythonVersionsListAsString}}
-        run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
+          VERIFY_IMAGE: "true"
 
   tests-kubernetes:
     timeout-minutes: 50
@@ -931,6 +932,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Get all PROD images"
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
+        env:
+          VERIFY_IMAGE: "false"
       - name: "Cache virtualenv for kubernetes testing"
         uses: actions/cache@v2
         with:
@@ -994,6 +997,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Get all PROD images"
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
+        env:
+          VERIFY_IMAGE: "false"
       - name: "Cache virtualenv for kubernetes testing"
         uses: actions/cache@v2
         with:
@@ -1165,6 +1170,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           Wait for CI images
           ${{ needs.build-info.outputs.pythonVersions }}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
+        env:
+          VERIFY_IMAGE: "false"
       - name: "Generate constraints with PyPI providers"
         run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
         env:
diff --git a/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
index b86a8f0..9047665 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh
@@ -45,14 +45,12 @@ start_end::group_end
 export AIRFLOW_CI_IMAGE_NAME="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
 
 start_end::group_start "Waiting for ${AIRFLOW_CI_IMAGE_NAME} image to appear"
-
 push_pull_remove_images::wait_for_github_registry_image \
     "${AIRFLOW_CI_IMAGE_NAME}${GITHUB_REGISTRY_IMAGE_SUFFIX}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
-
 build_images::prepare_ci_build
-
 pull_ci_image
-
-verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}"
-
 start_end::group_end
+
+if [[ ${VERIFY_IMAGE=} != "false" ]]; then
+    verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}"
+fi
diff --git a/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
index 6215147..92dbf2b 100755
--- a/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
+++ b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh
@@ -49,4 +49,6 @@ verbosity::print_info "Pulling the ${image_name_with_tag} image and tagging with
 push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" "${image_name_with_tag}"
 start_end::group_end
 
-verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}"
+if [[ ${VERIFY_IMAGE=} != "false" ]]; then
+    verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}"
+fi
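
The new ``VERIFY_IMAGE`` gate relies on the ``${VERIFY_IMAGE=}`` expansion, which assigns an empty default when the variable is unset, so the comparison does not trip ``set -u``. A self-contained sketch of the idiom:

```bash
#!/usr/bin/env bash
set -euo pipefail

# ${VERIFY_IMAGE=} assigns "" if VERIFY_IMAGE is unset, keeping the test safe under
# "set -u"; verification runs unless the caller explicitly sets the variable to "false".
if [[ ${VERIFY_IMAGE=} != "false" ]]; then
    echo "verifying image (default behaviour)"
else
    echo "skipping verification (VERIFY_IMAGE=false)"
fi
```

Run as-is the script takes the verify branch; with ``VERIFY_IMAGE=false`` exported it takes the skip branch, which is how the kubernetes and constraints jobs above opt out.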

[airflow] 11/29: Removes coverage from kubernetes tests (#16794)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit cf497c03b58d3ca67d0dffde6ee487564770eadc
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sat Jul 3 17:57:22 2021 +0200

    Removes coverage from kubernetes tests (#16794)
    
    The coverage generated by parallel runs of K8S tests often caused
    test failures because temporary .coverage files were generated
    in the airflow sources. Moreover, the coverage of those tests was
    actually wrong - it did not measure the coverage of Airflow code
    (which runs inside K8S in the scheduler/worker/webserver pods).

    Getting this right is a fairly complex task captured in #16793, but
    for now we should simply disable coverage for those tests.
    
    (cherry picked from commit 5399f9124a4e75c7bb89e47c267d89b5280060ad)
---
 .github/workflows/ci.yml                         | 65 ++++++++++++++++++++++--
 scripts/ci/kubernetes/ci_run_kubernetes_tests.sh |  9 ++--
 2 files changed, 65 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8b2735c..57ab690 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -975,15 +975,72 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         if: failure()
         with:
           name: >
-            kind-logs-
+            kind-logs-${{matrix.executor}}
           path: /tmp/kind_logs_*
           retention-days: 7
-      - name: "Upload artifact for coverage"
+
+  tests-helm-executor-upgrade:
+    timeout-minutes: 50
+    name: Helm Chart Executor Upgrade
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+    needs: [build-info, prod-images]
+
+    env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+      BACKEND: postgres
+      RUN_TESTS: "true"
+      RUNTIME: "kubernetes"
+      KUBERNETES_MODE: "image"
+      EXECUTOR: "KubernetesExecutor"
+      KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
+      HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
+      GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
+      CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
+        ${{needs.build-info.outputs.pythonVersionsListAsString}}
+      CURRENT_KUBERNETES_VERSIONS_AS_STRING: >
+        ${{needs.build-info.outputs.kubernetesVersionsListAsString}}
+    if: needs.build-info.outputs.run-kubernetes-tests == 'true'
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+      - name: "Setup python"
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
+      - name: "Free space"
+        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+      - name: "Get all PROD images"
+        run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
+      - name: "Cache virtualenv for kubernetes testing"
+        uses: actions/cache@v2
+        with:
+          path: ".build/.kubernetes_venv"
+          key: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}\
+  -${{needs.build-info.outputs.kubernetesVersionsListAsString}}
+  -${{needs.build-info.outputs.pythonVersionsListAsString}}
+  -${{ hashFiles('setup.py','setup.cfg') }}"
+          restore-keys: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}-\
+  -${{needs.build-info.outputs.kubernetesVersionsListAsString}}
+  -${{needs.build-info.outputs.pythonVersionsListAsString}}"
+      - name: "Cache bin folder with tools for kubernetes testing"
+        uses: actions/cache@v2
+        with:
+          path: ".build/kubernetes-bin"
+          key: "kubernetes-binaries
+  -${{ needs.build-info.outputs.defaultKindVersion }}\
+  -${{ needs.build-info.outputs.defaultHelmVersion }}"
+          restore-keys: "kubernetes-binaries"
+      - name: "Kubernetes Helm Chart Executor Upgrade Tests"
+        run: ./scripts/ci/kubernetes/ci_upgrade_cluster_with_different_executors_in_parallel.sh
+      - name: "Upload KinD logs"
         uses: actions/upload-artifact@v2
+        if: failure()
         with:
           name: >
-            coverage-k8s-
-          path: "./files/coverage*.xml"
+            kind-logs-KubernetesExecutor
+          path: /tmp/kind_logs_*
           retention-days: 7
 
   push-prod-images-to-github-registry:
diff --git a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
index 487b16b..6cab719 100755
--- a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
+++ b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
@@ -18,6 +18,8 @@
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+: "${EXECUTOR:?You must set EXECUTOR to one of 'KubernetesExecutor', 'CeleryExecutor', 'CeleryKubernetesExecutor' }"
+
 kind::make_sure_kubernetes_tools_are_installed
 kind::get_kind_cluster_name
 
@@ -60,9 +62,6 @@ function parse_tests_to_run() {
             "--verbosity=1"
             "--strict-markers"
             "--durations=100"
-            "--cov=airflow/"
-            "--cov-config=.coveragerc"
-            "--cov-report=xml:files/coverage=${KIND_CLUSTER_NAME}.xml"
             "--color=yes"
             "--maxfail=50"
             "--pythonwarnings=ignore::DeprecationWarning"
@@ -76,7 +75,7 @@ function create_virtualenv() {
     HOST_PYTHON_VERSION=$(python3 -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")')
     readonly HOST_PYTHON_VERSION
 
-    local virtualenv_path="${BUILD_CACHE_DIR}/.kubernetes_venv/${KIND_CLUSTER_NAME}_host_python_${HOST_PYTHON_VERSION}"
+    local virtualenv_path="${BUILD_CACHE_DIR}/.kubernetes_venv/${KIND_CLUSTER_NAME}_host_python_${HOST_PYTHON_VERSION}_${EXECUTOR}"
 
     mkdir -pv "${BUILD_CACHE_DIR}/.kubernetes_venv/"
     if [[ ! -d ${virtualenv_path} ]]; then
@@ -90,7 +89,7 @@ function create_virtualenv() {
 
     pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" "wheel==${WHEEL_VERSION}"
 
-    pip install pytest freezegun pytest-cov \
+    pip install pytest freezegun \
       --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt"
 
     pip install -e ".[cncf.kubernetes,postgres]" \

[airflow] 12/29: Remove legacy GitHub Packages (#16776)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 03a23e015122641f7a6c459ac20505e91321d1df
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sun Jul 4 13:44:51 2021 +0200

    Remove legacy GitHub Packages (#16776)
    
    This PR removes the legacy GitHub Packages support:
    * removes checking for images in Packages/Registry
    * removes output informing about the registry
    * hard-codes registry to ghcr.io
    * Updates documentation describing the registries
    
    (cherry picked from commit b251d22fffad63124eec5246b80035408b543704)
---
 .github/workflows/build-images.yml               |   1 -
 .github/workflows/ci.yml                         |  33 +------
 BREEZE.rst                                       |  30 ------
 CI.rst                                           |  44 +++------
 IMAGES.rst                                       | 113 +++++------------------
 breeze                                           |  69 +++++---------
 breeze-complete                                  |   6 --
 scripts/ci/libraries/_initialization.sh          |   3 +-
 scripts/ci/libraries/_push_pull_remove_images.sh |  36 --------
 9 files changed, 65 insertions(+), 270 deletions(-)
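
With the legacy registry gone, the documentation updated below converges on a single flow: log in to ghcr.io with a personal access token that has the packages write scope and let Breeze tag and push for you. A minimal sketch of that flow (the token and username variables are placeholders; the breeze flags are the ones documented in the updated IMAGES.rst):

```bash
# Log in to GitHub Container Registry with a PAT that has the "write:packages" scope
echo "${MY_GITHUB_PAT}" | docker login ghcr.io --username "${MY_GITHUB_USERNAME}" --password-stdin

# Build a CI image locally and push it to ghcr.io using the single remaining flag
./breeze build-image --python 3.6 --force-build-images --build-cache-local
./breeze push-image --python 3.6 --use-github-registry
```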

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 9097241..9cbae1d 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -47,7 +47,6 @@ env:
   GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
   GITHUB_REGISTRY_WAIT_FOR_IMAGE: "false"
   INSTALL_PROVIDERS_FROM_SOURCES: "true"
-  GITHUB_REGISTRY: "ghcr.io"
   TARGET_COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
 
 concurrency:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 57ab690..6763886 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -37,7 +37,6 @@ env:
   VERBOSE: "true"
   DOCKER_CACHE: "pulled"
   USE_GITHUB_REGISTRY: "true"
-  GITHUB_REGISTRY: "ghcr.io"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
   # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the
@@ -266,8 +265,6 @@ jobs:
       BACKEND: sqlite
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
       WAIT_FOR_IMAGE: ${{ needs.build-info.outputs.waitForImage }}
-    outputs:
-      githubRegistry: ${{ steps.wait-for-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -295,9 +292,6 @@ jobs:
         # We are utilising single job to wait for all images because this job merely waits
         # for the images to be available.
         # The test jobs wait for it to complete if WAIT_FOR_IMAGE is 'true'!
-        # The job will set the output "githubRegistry" - result of auto-detect which registry has
-        # been used by checking where the image can be downloaded from.
-        #
         run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
 
 
@@ -311,7 +305,6 @@ jobs:
       SKIP: "identity"
       MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.basic-checks-only == 'false'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -417,7 +410,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     if: needs.build-info.outputs.docs-build == 'true'
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -465,7 +457,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       AIRFLOW_EXTRAS: "all"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
       NON_INTERACTIVE: "true"
       GENERATE_PROVIDERS_ISSUE: "true"
     if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main'
@@ -514,7 +505,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       AIRFLOW_EXTRAS: "all"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: ".dev0"
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
       NON_INTERACTIVE: "true"
       GENERATE_PROVIDERS_ISSUE: "true"
     if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'main'
@@ -558,7 +548,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       BACKEND: ""
       DB_RESET: "false"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: >
       needs.build-info.outputs.needs-helm-tests == 'true' &&
       (github.repository == 'apache/airflow' || github.event_name != 'schedule')
@@ -618,7 +607,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       POSTGRES_VERSION: ${{ matrix.postgres-version }}
       TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -675,7 +663,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       MYSQL_VERSION: ${{ matrix.mysql-version }}
       TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -729,7 +716,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       BACKEND: sqlite
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -781,7 +767,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       TEST_TYPES: "Quarantined"
       NUM_RUNS: 10
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -880,8 +865,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       BACKEND: sqlite
       PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
-    outputs:
-      githubRegistry: ${{ steps.wait-for-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -904,8 +887,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         # "build-images-workflow-run.yml' run as pull_request_target.
         # We are utilising single job to wait for all images because this job merely waits
         # For the images to be available. The test jobs wait for it to complete!
-        # The job will set the output "githubRegistry" - result of auto-detect which registry has
-        # been used by checking where the image can be downloaded from.
         #
         id: wait-for-images
         env:
@@ -930,12 +911,13 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       EXECUTOR: ${{matrix.executor}}
       KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
       HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
-      GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
       CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
         ${{needs.build-info.outputs.pythonVersionsListAsString}}
       CURRENT_KUBERNETES_VERSIONS_AS_STRING: >
         ${{needs.build-info.outputs.kubernetesVersionsListAsString}}
-    if: needs.build-info.outputs.run-kubernetes-tests == 'true'
+    if: >
+      needs.build-info.outputs.run-kubernetes-tests == 'true' ||
+      needs.build-info.outputs.needs-helm-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -994,7 +976,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       EXECUTOR: "KubernetesExecutor"
       KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
       HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
-      GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
       CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
         ${{needs.build-info.outputs.pythonVersionsListAsString}}
       CURRENT_KUBERNETES_VERSIONS_AS_STRING: >
@@ -1058,7 +1039,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - tests-kubernetes
       - prod-images
       - docs
-    # TODO: Generalize me (find a better way to select matching branches)
     if: >
       (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
       github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') &&
@@ -1070,7 +1050,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
-      GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -1120,7 +1099,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - tests-kubernetes
       - ci-images
       - docs
-    # TODO: Generalize me (find a better way to select matching branches)
     if: >
       (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
       github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test') &&
@@ -1132,7 +1110,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -1167,10 +1144,8 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
       CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: ${{needs.build-info.outputs.pythonVersionsListAsString}}
     # Only run it for direct pushes
-    # TODO: Generalize me (find a better way to select matching branches)
     if: >
       github.ref == 'refs/heads/main' || github.ref == 'refs/heads/v1-10-test' ||
       github.ref == 'refs/heads/v2-0-test' || github.ref == 'refs/heads/v2-1-test'
@@ -1264,8 +1239,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     name: React UI tests
     runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
-    env:
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-ui-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
diff --git a/BREEZE.rst b/BREEZE.rst
index d199367..0bb83fe 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1445,16 +1445,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           DockerHub. You need to be logged in to the registry in order to be able to pull/push from
           and you need to be committer to push to Apache Airflow' GitHub registry.
 
-  --github-registry GITHUB_REGISTRY
-          GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
-          registry.
-
-          Default: ghcr.io.
-
-          If you use this flag, automatically --use-github-registry flag is enabled.
-
-                 ghcr.io docker.pkg.github.com
-
   -g, --github-repository GITHUB_REPOSITORY
           GitHub repository used to pull, push images when cache is used.
           Default: apache/airflow.
@@ -1619,16 +1609,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           DockerHub. You need to be logged in to the registry in order to be able to pull/push from
           and you need to be committer to push to Apache Airflow' GitHub registry.
 
-  --github-registry GITHUB_REGISTRY
-          GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
-          registry.
-
-          Default: ghcr.io.
-
-          If you use this flag, automatically --use-github-registry flag is enabled.
-
-                 ghcr.io docker.pkg.github.com
-
   -g, --github-repository GITHUB_REPOSITORY
           GitHub repository used to pull, push images when cache is used.
           Default: apache/airflow.
@@ -2692,16 +2672,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
           DockerHub. You need to be logged in to the registry in order to be able to pull/push from
           and you need to be committer to push to Apache Airflow' GitHub registry.
 
-  --github-registry GITHUB_REGISTRY
-          GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
-          registry.
-
-          Default: ghcr.io.
-
-          If you use this flag, automatically --use-github-registry flag is enabled.
-
-                 ghcr.io docker.pkg.github.com
-
   -g, --github-repository GITHUB_REPOSITORY
           GitHub repository used to pull, push images when cache is used.
           Default: apache/airflow.
diff --git a/CI.rst b/CI.rst
index 4b92d2d..dcc859d 100644
--- a/CI.rst
+++ b/CI.rst
@@ -54,7 +54,7 @@ it can be ~6-7 minutes and in case base image of Python releases new patch-level
 Container Registry used as cache
 --------------------------------
 
-For the CI builds of our we are using Container Registry to store results of the "Build Image" workflow
+For the CI builds we are using GitHub Container Registry to store results of the "Build Image" workflow
 and pass it to the "CI Build" workflow.
 
 Currently in main version of Airflow we run tests in 4 different versions of Python (3.6, 3.7, 3.8, 3.9)
@@ -70,40 +70,23 @@ This is especially important in our case where Pull Requests to Airflow might co
 and it would be a huge security issue if anyone from outside could
 utilise the WRITE access to Apache Airflow repository via an external Pull Request.
 
-Thanks to the WRITE access and fact that the 'pull_request_target' by default uses the 'main' version of the
+Thanks to the WRITE access and fact that the ``pull_request_target`` by default uses the ``main`` version of the
 sources, we can safely run some logic there that will check out the incoming Pull Request, build the container
 image from the sources of the incoming PR and push such an image to a GitHub Docker Registry - so that
 this image can be built only once and used by all the jobs running tests. The image is tagged with the unique
 ``COMMIT_SHA`` of the incoming Pull Request and the tests run in the Pull Request can simply pull such an image
 rather than build it from scratch. Pulling such an image takes ~1 minute, which saves
 a lot of precious time for our jobs.
+
+We use `GitHub Container Registry <https://docs.github.com/en/packages/guides/about-github-container-registry>`_.
+As with the GitHub Package Registry, a ``GITHUB_TOKEN`` is needed to push to the registry. You also have to manually manage
+permissions of the images: after creating an image for the first time (pushing it using your personal token)
+you need to set its visibility to ``Public`` and enable
+`Inheriting access from repository <https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility#inheriting-access-for-a-container-image-from-a-repository>`_.
+Those images follow a specific naming schema. See `Images documentation <IMAGES.rst>`_ for details.
 
-We can use either of the two available GitHub Container registries as cache:
-
-* Legacy `GitHub Package Registry <https://github.com/features/packages>`_ which is not very
-  stable, uses old infrastructure of GitHub and it lacks certain features - notably it does not allow
-  us to delete the old image. The benefit of using GitHub Package Registry is that it works
-  out-of-the-box (write authentication is done using ``GITHUB_TOKEN`` and users do not have to do any
-  action to make it work in case they want to run build using their own forks. Also those images
-  do not provide public access, so you need to login to ``docker.pkg.github.com`` docker registry
-  using your username and personal token to be able to pull those images.
-
-* The new `GitHub Container Registry <https://docs.github.com/en/packages/guides/about-github-container-registry>`_
-  which is in Public Beta, has many more features (including permission management, public access and
-  image retention possibility). Similarly as in case of GitHub Package Registry ``GITHUB_TOKEN`` is needed
-  to push to the repositories. You also have to manually manage permissions of the images,
-  i.e. after creating images for the first time, you need to set their visibility to ``Public`` and
-  add ``Admin`` permissions to group of people managing the images (in our case ``airflow-committers`` group).
-  This makes it not very suitable to use GitHub container registry if you want to run builds of Airflow
-  in your own forks (note - it does not affect pull requests from forks to Airflow).
-
-Those two images have different naming schemas. See `Images documentation <IMAGES.rst>`_ for details.
-
-You can interact with the GitHub Registry images (pull/push) via `Breeze <BREEZE.rst>`_  - you can
-pass ``--github-registry`` flag with either ``docker.pkg.github.com`` for GitHub Package Registry or
-``ghcr.io`` for GitHub Container Registry and pull/push operations will be performed using the chosen
-registry, using appropriate naming convention. This allows building and pushing the images locally by
-committers who have access to push/pull those images.
+You can interact with the GitHub Registry images (pull/push) via `Breeze <BREEZE.rst>`_  - by passing
+``--use-github-registry`` flag.
 
 Locally replicating CI failures
 -------------------------------
@@ -787,7 +770,7 @@ cd27124534b46c9688a1d89e75fcd137ab5137e3, in python 3.8 environment you can run:
 
 .. code-block:: bash
 
-  ./breeze --github-image-id cd27124534b46c9688a1d89e75fcd137ab5137e3 --github-registry ghcr.io --python 3.8
+  ./breeze --github-image-id cd27124534b46c9688a1d89e75fcd137ab5137e3 --use-github-registry --python 3.8
 
 You will be dropped into a shell with the exact version that was used during the CI run and you will
 be able to run pytest tests manually, easily reproducing the environment that was used in CI. Note that in
@@ -848,8 +831,7 @@ In order to add a new version the following operations should be done (example u
 .. code-block:: bash
 
   ./breeze push-image --python 3.9
-  ./breeze push-image --python 3.9 --github-registry ghcr.io
-  ./breeze push-image --python 3.9 --github-registry docker.pkg.github.com
+  ./breeze push-image --python 3.9 --use-github-registry
 
 * Find the 3 new images (main, ci, build) created in
   `GitHub Container registry <https://github.com/orgs/apache/packages?tab=packages&ecosystem=container&q=airflow>`_
diff --git a/IMAGES.rst b/IMAGES.rst
index c3a1805..47ce39f 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -270,14 +270,11 @@ Choosing image registry
 =======================
 
 By default images are pulled and pushed from and to DockerHub registry when you use Breeze's push-image
-or build commands. But as described in `CI Documentation <CI.rst>`_, you can choose different image
-registry by setting ``GITHUB_REGISTRY`` to ``docker.pkg.github.com`` for GitHub Package Registry or
-``ghcr.io`` for GitHub Container Registry.
+or build commands.
 
-Default is the GitHub Package Registry one. The Pull Request forks have no access to the secret but they
-auto-detect the registry used when they wait for the images.
+But as described in `CI Documentation <CI.rst>`_, you can choose GitHub Container Registry.
 
-Our images are named following conventions below.
+Naming convention for DockerHub images.
 
 Images used during CI builds:
 
@@ -320,53 +317,16 @@ For example:
 You can see those CI DockerHub images at `<https://hub.docker.com/r/apache/airflow>`_
 
 
-Using GitHub registries as build cache
---------------------------------------
+Using GitHub Container Registry as build cache
+----------------------------------------------
 
-By default DockerHub registry is used when you push or pull such images.
-However for CI builds we keep the images in GitHub registry as well - this way we can easily push
-the images automatically after merge requests and use such images for Pull Requests
-as cache - which makes it much it much faster for CI builds (images are available in cache
-right after merged request in main finishes it's build), The difference is visible especially if
-significant changes are done in the Dockerfile.CI.
-
-The images are named differently (in Docker definition of image names - registry URL is part of the
-image name if DockerHub is not used as registry). Also GitHub has its own structure for registries
-each project has its own registry naming convention that should be followed. The name of
-images for GitHub registry are different as they must follow limitation of the registry used.
-
-We are still using GitHub Packages as registry, but we are in the process of testing and switching
-to GitHub Container Registry, and the naming conventions are slightly different (GitHub Packages
-required all packages to have "organization/repository/" URL prefix ("apache/airflow/",
-where in GitHub Container Registry, all images are in "organization" not in "repository" and they are all
-in organization wide "apache/" namespace rather than in "apache/airflow/" one).
-We are adding "airflow-" as prefix for image names of all Airflow images instead.
+We are using GitHub Container Registry as a build cache. The images all live in the organization-wide "apache/"
+namespace, and we add an "airflow-" prefix to the names of all Airflow images.
 The images are linked to the repository via ``org.opencontainers.image.source`` label in the image.
 
-Naming convention for GitHub Packages
--------------------------------------
-
-Images with a commit SHA (built for pull requests and pushes)
-
-.. code-block:: bash
-
-  docker.pkg.github.com/apache-airflow/<BRANCH>-pythonX.Y-ci-v2:<COMMIT_SHA> - for CI images
-  docker.pkg.github.com/apache-airflow/<BRANCH>-pythonX.Y-v2:<COMMIT_SHA>       - for production images
-  docker.pkg.github.com/apache-airflow/<BRANCH>-pythonX.Y-build-v2:<COMMIT_SHA> - for production build stage
-  docker.pkg.github.com/apache-airflow/python-v2:X.Y-slim-buster-<COMMIT_SHA>   - for base Python images
-
-Latest images (pushed when main merge succeeds):
-
-.. code-block:: bash
-
-  docker.pkg.github.com/apache/airflow/<BRANCH>-pythonX.Y-ci-v2:latest    - for CI images
-  docker.pkg.github.com/apache/airflow/<BRANCH>-pythonX.Y-v2:latest       - for production images
-  docker.pkg.github.com/apache/airflow/<BRANCH>-pythonX.Y-build-v2:latest - for production build stage
-  docker.pkg.github.com/apache/airflow/python-v2:X.Y-slim-buster          - for base Python images
-
+See https://docs.github.com/en/packages/learn-github-packages/connecting-a-repository-to-a-package
 
-Naming convention for GitHub Container Registry
------------------------------------------------
+Naming convention for the GitHub packages.
 
 Images with a commit SHA (built for pull requests and pushes)
 
@@ -386,40 +346,28 @@ Latest images (pushed when main merge succeeds):
   ghcr.io/apache/airflow-<BRANCH>-pythonX.Y-build-v2:latest - for production build stage
   ghcr.io/apache/airflow-python-v2:X.Y-slim-buster          - for base Python images
 
-Note that we never push or pull "release" images to GitHub registry. It is only used for CI builds
-
+Note that we never push or pull "release" images to GitHub registry. Those are only pushed to DockerHub.
 You can see all the current GitHub images at `<https://github.com/apache/airflow/packages>`_
 
-
-In order to interact with the GitHub images you need to add ``--use-github-registry`` flag to the pull/push
-commands in Breeze. This way the images will be pulled/pushed from/to GitHub rather than from/to
-DockerHub. Images are build locally as ``apache/airflow`` images but then they are tagged with the right
-GitHub tags for you. You can also specify ``--github-registry`` option and choose which of the
-GitHub registries are used (``docker.pkg.github.com`` chooses GitHub Packages and ``ghcr.io`` chooses
-GitHub Container Registry).
+In order to interact with the GitHub Container Registry you need to add the ``--use-github-registry``
+flag to the pull/push commands in Breeze. This way the images will be pulled/pushed from/to GitHub
+rather than from/to DockerHub. Images are built locally as ``apache/airflow`` images but then they are
+tagged with the right GitHub tags for you automatically.
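
For example, assuming you have already built both the CI and the production image locally, pushing
them to the GitHub Container Registry could look like this (a sketch mirroring the release example
later in this document):

.. code-block:: bash

  ./breeze push-image --use-github-registry
  ./breeze push-image --production-image --use-github-registry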
 
 You can read more about the CI configuration and how CI builds are using DockerHub/GitHub images
 in `<CI.rst>`_.
 
 Note that you need to be committer and have the right to push to DockerHub and GitHub and you need to
-be logged in. Only committers can push images directly. You need to login with your
-Personal Access Token with "packages" scope to be able to push to those repositories or pull from them
+be logged in to the registry. Only committers can push images directly. You need to log in with your
+Personal Access Token with the "packages" write scope to be able to push to those repositories or pull from them
 in case of GitHub Packages.
 
-GitHub Packages:
-
-.. code-block:: bash
-
-  docker login docker.pkg.github.com
-
 GitHub Container Registry
 
 .. code-block:: bash
 
   docker login ghcr.io
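
If you prefer a non-interactive login (for example in scripts), you can pipe the token to
``docker login``. A minimal sketch, assuming your Personal Access Token and GitHub username are
exported as ``GITHUB_TOKEN`` and ``GITHUB_USERNAME`` (those variable names are only an assumption here):

.. code-block:: bash

  echo "${GITHUB_TOKEN}" | docker login ghcr.io --username "${GITHUB_USERNAME}" --password-stdin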
 
-Interacting with container registries
-=====================================
 
 Since there are different naming conventions used for Airflow images and there are multiple images used,
 `Breeze <BREEZE.rst>`_ provides easy to use management interface for the images. The
@@ -431,23 +379,12 @@ to refresh them.
 This can be done with ``Breeze`` command line which has easy-to-use tool to manage those images. For
 example:
 
-
 Force building Python 3.6 CI image using local cache and pushing it to the container registry:
 
 .. code-block:: bash
 
-  ./breeze build-image --python 3.6 --force-build-images --build-cache-local
-  ./breeze push-image --python 3.6 --github-registry ghcr.io
-
-
-Building Python 3.7 PROD images (both build and final image) using cache pulled
-from ``docker.pkg.github.com`` and pushing it back:
-
-.. code-block:: bash
-
-  ./breeze build-image --production-image --python 3.7 --github-registry docker.pkg.github.com
-  ./breeze push-image --production-image --python 3.7 --github-registry docker.pkg.github.com
-
+  ./breeze build-image --python 3.6 --force-build-images --force-pull-base-python-image --build-cache-local
+  ./breeze push-image --python 3.6 --use-github-registry
 
 Building Python 3.8 CI image using cache pulled from DockerHub and pushing it back:
 
@@ -461,13 +398,13 @@ tool that allows to reproduce CI failures locally, enter the images and fix them
 to pass ``--github-image-id`` and the registry and Breeze will download and execute commands using
 the same image that was used during the CI build.
 
-For example this command will run the same Python 3.8 image as was used in 210056909
-run with enabled Kerberos integration (assuming docker.pkg.github.com was used as build cache).
+For example, this command will run the same Python 3.8 image that was used in the build identified by
+commit SHA 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e, with the Kerberos integration enabled.
 
 .. code-block:: bash
 
-  ./breeze --github-image-id 210056909 \
-    --github-registry docker.pkg.github.com \
+  ./breeze --github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e \
+    --use-github-registry \
     --python 3.8 --integration kerberos
 
 You can see more details and examples in `Breeze <BREEZE.rst>`_
@@ -774,11 +711,9 @@ GitHub Registies in order to be able to do that.
             ./breeze build-image --python ${python_version} --build-cache-local \
                     --production-image --verbose
             ./breeze push-image
-            ./breeze push-image --github-registry ghcr.io
-            ./breeze push-image --github-registry docker.pkg.github.com
+            ./breeze push-image --use-github-registry
             ./breeze push-image --production-image
-            ./breeze push-image --github-registry ghcr.io --production-image
-            ./breeze push-image --github-registry docker.pkg.github.com --production-image
+            ./breeze push-image --production-image --use-github-registry
     done
 
 Running the CI image
diff --git a/breeze b/breeze
index 9a352fd..6240d07 100755
--- a/breeze
+++ b/breeze
@@ -572,6 +572,7 @@ EOF
 #   BACKEND
 #   AIRFLOW_VERSION
 #   INSTALL_AIRFLOW_VERSION
+#   SSH_PORT
 #   WEBSERVER_HOST_PORT
 #   POSTGRES_HOST_PORT
 #   POSTGRES_VERSION
@@ -620,6 +621,7 @@ export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}"
 export BACKEND="${BACKEND}"
 export AIRFLOW_VERSION="${AIRFLOW_VERSION}"
 export INSTALL_AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
+export SSH_PORT="${SSH_PORT}"
 export WEBSERVER_HOST_PORT="${WEBSERVER_HOST_PORT}"
 export FLOWER_HOST_PORT="${FLOWER_HOST_PORT}"
 export REDIS_HOST_PORT="${REDIS_HOST_PORT}"
@@ -1141,15 +1143,6 @@ function breeze::parse_arguments() {
             export USE_GITHUB_REGISTRY="true"
             shift
             ;;
-        --github-registry)
-            echo
-            echo "Using GitHub registry."
-            echo "GitHub registry used: ${2}"
-            echo
-            export GITHUB_REGISTRY="${2}"
-            export USE_GITHUB_REGISTRY="true"
-            shift 2
-            ;;
         -g | --github-repository)
             echo
             echo "Using GitHub registry."
@@ -1536,6 +1529,12 @@ function breeze::parse_arguments() {
 #
 #######################################################################################################
 function breeze::prepare_formatted_versions() {
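+    # Guard: this function may be called several times (when printing usage and flags);
+    # prepare the formatted version lists only once and make subsequent calls a no-op.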
+    if [[ -n "${_breeze_formatted_versions_prepared:-}" ]]; then
+        return
+    fi
+
+    _breeze_formatted_versions_prepared=1
+
     local indent=15
     local list_prefix
     list_prefix=$(printf "%-${indent}s" " ")
@@ -1593,10 +1592,6 @@ function breeze::prepare_formatted_versions() {
         tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
     readonly FORMATTED_GENERATE_CONSTRAINTS_MODE
 
-    FORMATTED_GITHUB_REGISTRY=$(echo "${_breeze_allowed_github_registrys=""}" |
-        tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
-    readonly FORMATTED_GITHUB_REGISTRY
-
     FORMATTED_POSTGRES_VERSIONS=$(echo "${_breeze_allowed_postgres_versions=""}" |
         tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
     readonly FORMATTED_POSTGRES_VERSIONS
@@ -1641,6 +1636,14 @@ function breeze::prepare_formatted_versions() {
 # shellcheck disable=SC2034,SC2090,SC2089,SC2155
 
 function breeze::prepare_usage() {
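+    # Guard: usage texts are requested from several places (breeze::usage, breeze::get_usage,
+    # breeze::get_detailed_usage), so build them lazily and only on the first call.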
+    if [[ -n "${_breeze_usage_prepared:-}" ]]; then
+        return
+    fi
+
+    _breeze_usage_prepared=1
+
+    breeze::prepare_formatted_versions
+
     # Note that MacOS uses Bash 3.* and we cannot use associative arrays
     export USAGE_SHELL="[Default] Enters interactive shell in the container"
     readonly USAGE_SHELL
@@ -2133,6 +2136,7 @@ function breeze::get_variable_from_lowercase_name() {
 #    usage information for the command.
 #######################################################################################################
 function breeze::get_usage() {
+    breeze::prepare_usage
     breeze::get_variable_from_lowercase_name "USAGE" "${1}"
 }
 
@@ -2145,6 +2149,7 @@ function breeze::get_usage() {
 #    Detailed usage information for the command.
 #######################################################################################################
 function breeze::get_detailed_usage() {
+    breeze::prepare_usage
     breeze::get_variable_from_lowercase_name "DETAILED_USAGE" "${1}"
 }
 
@@ -2162,6 +2167,7 @@ function breeze::get_detailed_usage() {
 #    General usage information for all commands.
 #######################################################################################################
 function breeze::usage() {
+    breeze::prepare_usage
     echo "
 
 usage: ${CMDNAME} [FLAGS] [COMMAND] -- <EXTRA_ARGS>
@@ -2804,16 +2810,6 @@ function breeze::flag_pull_push_docker_images() {
         DockerHub. You need to be logged in to the registry in order to be able to pull/push from
         and you need to be committer to push to Apache Airflow' GitHub registry.
 
---github-registry GITHUB_REGISTRY
-        GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
-        registry.
-
-        Default: ${_breeze_default_github_registry:=}.
-
-        If you use this flag, automatically --use-github-registry flag is enabled.
-
-${FORMATTED_GITHUB_REGISTRY}
-
 -g, --github-repository GITHUB_REPOSITORY
         GitHub repository used to pull, push images when cache is used.
         Default: ${_breeze_default_github_repository:=}.
@@ -2919,6 +2915,8 @@ ${FORMATTED_TEST_TYPES}
 #    Flag information.
 #######################################################################################################
 function breeze::flags() {
+    breeze::prepare_formatted_versions
+
     echo "
 $(breeze::print_line)
 
@@ -3082,18 +3080,6 @@ function breeze::read_saved_environment_variables() {
 
     MYSQL_VERSION="${MYSQL_VERSION:=$(parameters::read_from_file MYSQL_VERSION)}"
     MYSQL_VERSION=${MYSQL_VERSION:=${_breeze_default_mysql_version}}
-
-    # Here you read DockerHub user/account that you use
-    # You can populate your own images in DockerHub this way and work with the,
-    # You can override it with "--dockerhub-user" option and it will be stored in .build directory
-    DOCKERHUB_USER="${DOCKERHUB_USER:=$(parameters::read_from_file DOCKERHUB_USER)}"
-    DOCKERHUB_USER="${DOCKERHUB_USER:=${_breeze_default_dockerhub_user}}"
-
-    # Here you read DockerHub repo that you use
-    # You can populate your own images in DockerHub this way and work with them
-    # You can override it with "--dockerhub-repo" option and it will be stored in .build directory
-    DOCKERHUB_REPO="${DOCKERHUB_REPO:=$(parameters::read_from_file DOCKERHUB_REPO)}"
-    DOCKERHUB_REPO="${DOCKERHUB_REPO:=${_breeze_default_dockerhub_repo}}"
 }
 
 #######################################################################################################
@@ -3149,15 +3135,9 @@ function breeze::check_and_save_all_params() {
     parameters::check_and_save_allowed_param "EXECUTOR" "Executors" "--executor"
     parameters::check_and_save_allowed_param "POSTGRES_VERSION" "Postgres version" "--postgres-version"
     parameters::check_and_save_allowed_param "MYSQL_VERSION" "Mysql version" "--mysql-version"
-    parameters::check_and_save_allowed_param "GITHUB_REGISTRY" "GitHub Registry" "--github-registry"
 
     parameters::check_allowed_param TEST_TYPE "Type of tests" "--test-type"
     parameters::check_allowed_param PACKAGE_FORMAT "Format of packages to build" "--package-format"
-
-
-    # Can't verify those - they can be anything, so let's just save them
-    parameters::save_to_file DOCKERHUB_USER
-    parameters::save_to_file DOCKERHUB_REPO
 }
 
 #######################################################################################################
@@ -3167,6 +3147,7 @@ function breeze::check_and_save_all_params() {
 # Used global constants:
 #
 #     AIRFLOW_SOURCES
+#     SSH_PORT
 #     WEBSERVER_HOST_PORT
 #     POSTGRES_HOST_PORT
 #     MYSQL_HOST_PORT
@@ -3198,6 +3179,7 @@ function breeze::print_cheatsheet() {
         echo " Port forwarding:"
         echo
         echo "   Ports are forwarded to the running docker containers for webserver and database"
+        echo "     * ${SSH_PORT} -> forwarded to Airflow ssh server -> airflow:22"
         echo "     * ${WEBSERVER_HOST_PORT} -> forwarded to Airflow webserver -> airflow:8080"
         echo "     * ${FLOWER_HOST_PORT} -> forwarded to Flower dashboard -> airflow:5555"
         echo "     * ${POSTGRES_HOST_PORT} -> forwarded to Postgres database -> postgres:5432"
@@ -3205,6 +3187,7 @@ function breeze::print_cheatsheet() {
         echo "     * ${REDIS_HOST_PORT} -> forwarded to Redis broker -> redis:6379"
         echo
         echo "   Here are links to those services that you can use on host:"
+        echo "     * ssh connection for remote debugging: ssh -p ${SSH_PORT} airflow@127.0.0.1 pw: airflow"
         echo "     * Webserver: http://127.0.0.1:${WEBSERVER_HOST_PORT}"
         echo "     * Flower:    http://127.0.0.1:${FLOWER_HOST_PORT}"
         echo "     * Postgres:  jdbc:postgresql://127.0.0.1:${POSTGRES_HOST_PORT}/airflow?user=postgres&password=airflow"
@@ -3653,10 +3636,6 @@ start_end::script_start
 
 traps::add_trap start_end::script_end EXIT
 
-breeze::prepare_formatted_versions
-
-breeze::prepare_usage
-
 set +u
 breeze::parse_arguments "${@}"
 
diff --git a/breeze-complete b/breeze-complete
index 65a3ee0..58af21b 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -27,8 +27,6 @@ _breeze_allowed_python_major_minor_versions="3.6 3.7 3.8 3.9"
 _breeze_allowed_backends="sqlite mysql postgres"
 _breeze_allowed_integrations="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino all"
 _breeze_allowed_generate_constraints_modes="source-providers pypi-providers no-providers"
-# registrys is good here even if it is not correct english. We are adding s automatically to all variables
-_breeze_allowed_github_registrys="ghcr.io docker.pkg.github.com"
 _breeze_allowed_kubernetes_modes="image"
 _breeze_allowed_kubernetes_versions="v1.20.2 v1.19.7 v1.18.15"
 _breeze_allowed_helm_versions="v3.2.4"
@@ -45,7 +43,6 @@ _breeze_allowed_installation_methods=". apache-airflow"
 {
     # Default values for the commands & flags used
     _breeze_default_backend=$(echo "${_breeze_allowed_backends}" | awk '{print $1}')
-    _breeze_default_github_registry=$(echo "${_breeze_allowed_github_registrys}" | awk '{print $1}')
     _breeze_default_generate_providers_mode=$(echo "${_breeze_allowed_generate_constraints_modes}" | awk '{print $1}')
     _breeze_default_kubernetes_mode=$(echo "${_breeze_allowed_kubernetes_modes}" | awk '{print $1}')
     _breeze_default_kubernetes_version=$(echo "${_breeze_allowed_kubernetes_versions}" | awk '{print $1}')
@@ -314,9 +311,6 @@ function breeze_complete::get_known_values_breeze() {
     --installation-method)
         _breeze_known_values="${_breeze_allowed_installation_methods}"
         ;;
-    --github-registry)
-        _breeze_known_values="${_breeze_allowed_github_registrys}"
-        ;;
     --generate-constraints-mode)
         _breeze_known_values="${_breeze_allowed_generate_constraints_modes}"
         ;;
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 2b5458e..9895d8d 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -544,9 +544,9 @@ function initialization::initialize_git_variables() {
 
 function initialization::initialize_github_variables() {
     # Defaults for interacting with GitHub
+    export GITHUB_REGISTRY="ghcr.io"
     export USE_GITHUB_REGISTRY=${USE_GITHUB_REGISTRY:="false"}
     export GITHUB_REGISTRY_IMAGE_SUFFIX=${GITHUB_REGISTRY_IMAGE_SUFFIX:="-v2"}
-    export GITHUB_REGISTRY=${GITHUB_REGISTRY:="ghcr.io"}
     export GITHUB_REGISTRY_WAIT_FOR_IMAGE=${GITHUB_REGISTRY_WAIT_FOR_IMAGE:="false"}
     export GITHUB_REGISTRY_PULL_IMAGE_TAG=${GITHUB_REGISTRY_PULL_IMAGE_TAG:="latest"}
     export GITHUB_REGISTRY_PUSH_IMAGE_TAG=${GITHUB_REGISTRY_PUSH_IMAGE_TAG:="latest"}
@@ -708,7 +708,6 @@ Production image build variables:
 Detected GitHub environment:
 
     USE_GITHUB_REGISTRY: '${USE_GITHUB_REGISTRY}'
-    GITHUB_REGISTRY: '${GITHUB_REGISTRY}'
     GITHUB_REPOSITORY: '${GITHUB_REPOSITORY}'
     GITHUB_USERNAME: '${GITHUB_USERNAME}'
     GITHUB_TOKEN: '${GITHUB_TOKEN}'
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index a6a1c5c..f8a1d3e 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -291,35 +291,6 @@ function push_pull_remove_images::push_prod_images() {
     fi
 }
 
-# waits for an image to be available in GitHub Packages. Should be run with `set +e`
-# the build automatically determines which registry to use based one the images available
-function push_pull_remove_images::check_for_image_in_github_packages() {
-    local github_repository_lowercase
-    github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" |tr '[:upper:]' '[:lower:]')"
-    local github_api_endpoint
-    github_api_endpoint="https://docker.pkg.github.com/v2/${github_repository_lowercase}"
-    local image_name_in_github_registry="${1}"
-    local image_tag_in_github_registry=${2}
-    local image_to_wait_for=${GITHUB_REPOSITORY}/${image_name_in_github_registry}:${image_tag_in_github_registry}
-    local github_api_call
-    github_api_call="${github_api_endpoint}/${image_name_in_github_registry}/manifests/${image_tag_in_github_registry}"
-    echo "GitHub Packages: checking for ${image_to_wait_for} via ${github_api_call}!"
-    http_status=$(curl --silent --output "${OUTPUT_LOG}" --write-out "%{http_code}" \
-        --connect-timeout 60  --max-time 60 \
-        -X GET "${github_api_call}" -u "${GITHUB_USERNAME}:${GITHUB_TOKEN}")
-    if [[ ${http_status} == "200" ]]; then
-        echo  "Image: ${image_to_wait_for} found in GitHub Packages: ${COLOR_GREEN}OK.  ${COLOR_RESET}"
-        echo "::set-output name=githubRegistry::docker.pkg.github.com"
-        echo
-        echo "Setting githubRegistry output to docker.pkg.github.com"
-        echo
-        return 0
-    else
-        cat "${OUTPUT_LOG}"
-        echo "${COLOR_YELLOW}Still waiting. Status code ${http_status}!${COLOR_RESET}"
-        return 1
-    fi
-}
 
 # waits for an image to be available in GitHub Container Registry. Should be run with `set +e`
 function push_pull_remove_images::check_for_image_in_github_container_registry() {
@@ -332,10 +303,6 @@ function push_pull_remove_images::check_for_image_in_github_container_registry()
     local res=$?
     if [[ ${res} == "0" ]]; then
         echo  "Image: ${image_to_wait_for} found in Container Registry: ${COLOR_GREEN}OK.${COLOR_RESET}"
-        echo
-        echo "Setting githubRegistry output to ghcr.io"
-        echo
-        echo "::set-output name=githubRegistry::ghcr.io"
         return 0
     else
         echo "${COLOR_YELLOW}Still waiting. Not found!${COLOR_RESET}"
@@ -352,9 +319,6 @@ function push_pull_remove_images::wait_for_github_registry_image() {
         if push_pull_remove_images::check_for_image_in_github_container_registry "${@}"; then
             break
         fi
-        if push_pull_remove_images::check_for_image_in_github_packages "${@}"; then
-            break
-        fi
         sleep 30
     done
     set -e