Posted to commits@airflow.apache.org by ka...@apache.org on 2021/01/21 20:19:25 UTC

[airflow] branch v1-10-test updated (d9c973b -> c8d7883)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a change to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


 discard d9c973b  Run "third party" github actions from submodules instead (#13514)
 discard 52d0f57  Disable persisting credentials in Github Action's checkout (#13389)
 discard 1c9e621  Also add codecov action to apache airflow repo (#13328)
 discard 7d12723  Switch to Apache-owned GitHub actions (#13327)
    omit 4180803  fixup! Include airflow/contrib/executors in the dist package
    omit 05b6464  Allow webserver to read pod logs directly (#12598)
    omit 9fefd7c  Update chart readme to remove astronomer references (#13210)
    omit 74ff3e8  Fix parenthesis preventing Keda ScaledObject creation (#13183)
    omit 1fd654a  Use new logging options on values.yaml (#13173)
    omit bb5c8e5  The default value in chart should be 2.0.0 (#13125)
    omit 21ac6c1  fixup! Include airflow/contrib/executors in the dist package
    omit 70036e7  Default python version is used when building image
    omit 33b2c82  Click should be limited for Python 2.7
    omit 2320984  Reset PIP version after eager upgrade (#13251)
    omit cc76857  Include airflow/contrib/executors in the dist package
    omit 1023a75  fixup! Production images on CI are now built from packages (#12685)
    omit 10101f0  Skip identity pre-commit in ci and always display it first
    omit 205c86c  Production images on CI are now built from packages (#12685)
    omit 2adb41f  Add identity pre-commit hook (#13089)
    omit 770ea3c  Allows to install Airflow in Breeze from PIP with configurable extras (#13055)
    omit 3eadf3a  fixup! fixup! Install airflow and providers from dist and verifies them  (#13033)
    omit 9cc5820  fixup! Install airflow and providers from dist and verifies them  (#13033)
    omit 54fc2f9  Fixes image building in DockerHub (#13039)
    omit a8f57d1  Install airflow and providers from dist and verifies them  (#13033)
    omit 72c0a83  Fix gpg verification command (#13035)
    omit 1cbff2f  Changes release image preparation to use PyPI packages (#12990)
    omit 91b32ad  Add changes from 1.10.14 (#12993)
    omit 0e924ba  Update Dockerfile (#12987)
    omit 8a5d378  Update Dockerfile.ci (#12988)
    omit 8f3ffcc  Update CI to run tests againt v2-0-test branch (#10891)
    omit d4a149d  Update CI to run tests againt v2-0-test branch (#10891)
    omit 9e283fb  Apply labels to Docker images in a single instruction (#12931)
    omit 4f50e00  Builds prod images on DockerHub from packages (#12908)
    omit f12a23c  Kubernetes worker pod doesn't use docker container entrypoint (#12766)
    omit a806229  Clarifies version args for installing 1.10 in Docker (#12875)
    omit 637425f  Adds airflow as viable docker command in official image (#12878)
    omit 4f5170e  Artifacts in Github Action have a short retention period (#12793)
    omit 09a4885  Fix chart jobs delete policy for improved idempotency (#12646)
    omit 7b777ba  User-friendly output of Breeze and CI scripts (#12735)
    omit f114663  Improve wording of selective checks comments (#12701)
    omit 641a334  Enable PIP check for both CI and PROD image (#12664)
    omit 465cdf0  Adds --no-rbac-ui flag for Breeze airflow 1.10 installation (#11315)
    omit 91df4f0  Requirements shoudl be removed. Sufficient time passed.
    omit e75deee  Bugfix: Unable to import Airflow plugins on Python 3.8 (#12859)
     add f666da6  Add possibility to check if upgrade check rule applies (#12981)
     add ad34838  Add upgrade check rule for unrecognized arguments to Operators (#12660)
     add e67b03b  Enable upgrade checker to be run via `python -m airflow.upgrade.checker` (#13009)
     add 9b1759c  Check that needed (backport) providers are installed (#12902)
     add dd58fe6  Don't suggest change to users that will break dags on 1.1o.x (#13012)
     add 539be00  Prepare for release 1.1.0 of apache-airflow-upgrade_check (#13013)
     add 2e1f813  Add DBApiHook check for 2.0 migration (#12730)
     add 01524bb  Improve compatibility with sphinx_airflow_theme (#13002)
     add 3869ed2  Do not allow yanked version of Airflow in upgrade-check tool (#13160)
     add a1f5b08  Fix "airflow db upgrade" to upgrade db as intended (#13267)
     add 022f9e2  Moved boto3 limitation to snowflake (#13286)
     add 748d05f  Fix Jinja undefined upgrade check for non-templated objects (#13373)
     add e6eb5e7  Add upgrade check option to list checks (#13392)
     add 8ea0a38  Update URL for docs (#13561)
     add ee88f5f  KubernetesExecutor should accept images from executor_config (#13074)
     add fb84ebe  Created CustomExecutorsRequireFullPathRule class (#13678)
     add b4de6be  Scheduler should acknowledge active runs properly (#13803)
     new 5d642a3  Bugfix: Unable to import Airflow plugins on Python 3.8 (#12859)
     new f9c5a7f  Requirements shoudl be removed. Sufficient time passed.
     new 201e963  Adds --no-rbac-ui flag for Breeze airflow 1.10 installation (#11315)
     new 52eb912  Enable PIP check for both CI and PROD image (#12664)
     new 50d6c9a  Improve wording of selective checks comments (#12701)
     new 4fca05e  User-friendly output of Breeze and CI scripts (#12735)
     new 70c5a8a  Fix chart jobs delete policy for improved idempotency (#12646)
     new 0c21aa1  Artifacts in Github Action have a short retention period (#12793)
     new ece0299  Adds airflow as viable docker command in official image (#12878)
     new 726e0e1  Clarifies version args for installing 1.10 in Docker (#12875)
     new 4c45910  Kubernetes worker pod doesn't use docker container entrypoint (#12766)
     new 6e3c049  Builds prod images on DockerHub from packages (#12908)
     new 0934ff0  Apply labels to Docker images in a single instruction (#12931)
     new ba4858f  Update CI to run tests againt v2-0-test branch (#10891)
     new ad2d9d4  Update CI to run tests againt v2-0-test branch (#10891)
     new 4903401  Update Dockerfile.ci (#12988)
     new bfde735  Update Dockerfile (#12987)
     new 17d53ab  Add changes from 1.10.14 (#12993)
     new a7a6729  Changes release image preparation to use PyPI packages (#12990)
     new c03148b  Fix gpg verification command (#13035)
     new 0c8fe86  Install airflow and providers from dist and verifies them  (#13033)
     new 4541025  Fixes image building in DockerHub (#13039)
     new 16c4ebd  fixup! Install airflow and providers from dist and verifies them  (#13033)
     new a1584c2  fixup! fixup! Install airflow and providers from dist and verifies them  (#13033)
     new f7ab31f  Allows to install Airflow in Breeze from PIP with configurable extras (#13055)
     new 97c2d63  Add identity pre-commit hook (#13089)
     new fa6c547  Production images on CI are now built from packages (#12685)
     new 408ed13  Skip identity pre-commit in ci and always display it first
     new c58a9d3  fixup! Production images on CI are now built from packages (#12685)
     new 81f34e8  Include airflow/contrib/executors in the dist package
     new b22cd92  Reset PIP version after eager upgrade (#13251)
     new 0841c3b  Click should be limited for Python 2.7
     new f5efce7  Default python version is used when building image
     new 4e9c93f  fixup! Include airflow/contrib/executors in the dist package
     new 08bb392  The default value in chart should be 2.0.0 (#13125)
     new e328ae4  Use new logging options on values.yaml (#13173)
     new b3d5666  Fix parenthesis preventing Keda ScaledObject creation (#13183)
     new 126c5ea  Update chart readme to remove astronomer references (#13210)
     new 753f79d  Allow webserver to read pod logs directly (#12598)
     new 1700789  fixup! Include airflow/contrib/executors in the dist package
     new d774ff8  Switch to Apache-owned GitHub actions (#13327)
     new 1626de8  Also add codecov action to apache airflow repo (#13328)
     new 503c4d3  Disable persisting credentials in Github Action's checkout (#13389)
     new c8d7883  Run "third party" github actions from submodules instead (#13514)

This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version.  This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:

 * -- * -- B -- O -- O -- O   (d9c973b)
            \
             N -- N -- N   refs/heads/v1-10-test (c8d7883)

You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.

Any revisions marked "omit" are not gone; other references still
refer to them.  Any revisions marked "discard" are gone forever.

The 44 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
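
If the old tip is still available locally (for example, it was fetched before
the force push), the rewrite can be inspected directly with git. A sketch,
using the old and new tips named in the subject line:

    # commits reachable only from the old tip (the rewritten "O" revisions)
    git log --oneline c8d7883..d9c973b

    # commits that are new on the rewritten branch (the "N" revisions)
    git log --oneline d9c973b..c8d7883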


Summary of changes:
 airflow/bin/cli.py                                 |  2 +-
 airflow/jobs/scheduler_job.py                      |  1 +
 airflow/kubernetes/pod_generator.py                |  8 +-
 airflow/upgrade/README.md                          |  7 +-
 airflow/upgrade/checker.py                         | 28 ++++++-
 airflow/upgrade/formatters.py                      | 30 ++++---
 airflow/upgrade/problem.py                         | 10 ++-
 airflow/upgrade/rules/__init__.py                  |  4 +-
 airflow/upgrade/rules/base_rule.py                 |  7 ++
 .../custom_executors_require_full_path_rule.py     | 43 ++++++++++
 airflow/upgrade/rules/db_api_functions.py          | 97 ++++++++++++++++++++++
 airflow/upgrade/rules/import_changes.py            | 85 ++++++++++++++++---
 .../rules/no_additional_args_in_operators.py       | 74 +++++++++++++++++
 airflow/upgrade/rules/pod_template_file_rule.py    |  5 ++
 airflow/upgrade/rules/undefined_jinja_varaibles.py |  9 +-
 airflow/upgrade/setup.cfg                          |  4 +-
 airflow/www/app.py                                 |  4 +-
 airflow/www_rbac/app.py                            |  4 +-
 docs/conf.py                                       | 13 +++
 setup.py                                           |  3 +-
 tests/kubernetes/test_pod_generator.py             | 89 ++++++++++++++++++++
 tests/upgrade/rules/test_base_rule.py              | 16 ++++
 ...test_custom_executors_require_full_path_rule.py | 53 ++++++++++++
 tests/upgrade/rules/test_db_api_functions.py       | 71 ++++++++++++++++
 tests/upgrade/rules/test_import_changes.py         | 13 ++-
 ...ved.py => test_no_additional_args_operators.py} | 43 +++++-----
 .../rules/test_undefined_jinja_varaibles.py        |  8 +-
 tests/upgrade/test_formattes.py                    | 15 +++-
 tests/upgrade/test_problem.py                      |  7 +-
 29 files changed, 674 insertions(+), 79 deletions(-)
 create mode 100644 airflow/upgrade/rules/custom_executors_require_full_path_rule.py
 create mode 100644 airflow/upgrade/rules/db_api_functions.py
 create mode 100644 airflow/upgrade/rules/no_additional_args_in_operators.py
 create mode 100644 tests/upgrade/rules/test_custom_executors_require_full_path_rule.py
 create mode 100644 tests/upgrade/rules/test_db_api_functions.py
 copy tests/upgrade/rules/{test_mesos_executor_removed.py => test_no_additional_args_operators.py} (50%)


[airflow] 16/44: Update Dockerfile.ci (#12988)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4903401a5339265bd057cc6cef6e3ea9e92bcfa4
Author: jao6693 <fb...@gmail.com>
AuthorDate: Thu Dec 10 21:00:41 2020 +0100

    Update Dockerfile.ci (#12988)
    
    Fix permission issue in Azure DevOps when running the script install_mysql.sh, which prevents the build from succeeding:
    
    /bin/bash: ./scripts/docker/install_mysql.sh: Permission denied
    The command '/bin/bash -o pipefail -e -u -x -c ./scripts/docker/install_mysql.sh dev' returned a non-zero code: 126
    ##[error]The command '/bin/bash -o pipefail -e -u -x -c ./scripts/docker/install_mysql.sh dev' returned a non-zero code: 126
    ##[error]The process '/usr/bin/docker' failed with exit code 126
    
    (cherry picked from commit d84faa36a02f6a6ae2800e601bda4a20c3188190)
---
 Dockerfile.ci | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/Dockerfile.ci b/Dockerfile.ci
index bd00238..32b9383 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -107,7 +107,9 @@ RUN mkdir -pv /usr/share/man/man1 \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
 
-COPY scripts/docker scripts/docker
+COPY scripts/docker /scripts/docker
+# fix permission issue in Azure DevOps when running the script
+RUN chmod a+x /scripts/docker/install_mysql.sh
 RUN ./scripts/docker/install_mysql.sh dev
 
 RUN adduser airflow \
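
An alternative to adding a chmod layer (a sketch of a different approach, not
what this commit does) would be to record the executable bit in git itself, so
the COPY step already produces an executable file:

    git update-index --chmod=+x scripts/docker/install_mysql.sh
    git commit -m "Mark install_mysql.sh as executable"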


[airflow] 20/44: Fix gpg verification command (#13035)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c03148bd0ecee96782111443e9abf42769bbd2da
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sat Dec 12 19:30:09 2020 +0100

    Fix gpg verification command (#13035)
    
    (cherry picked from commit 825e9cb98411fe3999e79ee66d00e401fc4a91e0)
---
 dev/README_RELEASE_AIRFLOW.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
index 2fb1294..1cd2f9c 100644
--- a/dev/README_RELEASE_AIRFLOW.md
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -379,7 +379,7 @@ Once you have the keys, the signatures can be verified by running this:
 ```shell script
 for i in *.asc
 do
-   echo "Checking $i"; gpg --verify `basename $i .asc`
+   echo "Checking $i"; gpg --verify $i
 done
 ```
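
The fix works because, given a detached *.asc signature, gpg locates the
signed file by stripping the .asc suffix; the old command instead passed the
data file itself to gpg as if it were a signature. For example (the artifact
name here is hypothetical):

    gpg --verify apache-airflow-1.10.14-source.tar.gz.asc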
 


[airflow] 25/44: Allows to install Airflow in Breeze from PIP with configurable extras (#13055)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f7ab31f12217c926fa56e915b864d763371d30a8
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Mon Dec 14 15:10:06 2020 +0100

    Allows to install Airflow in Breeze from PIP with configurable extras (#13055)
    
    The extras configured by the --extras Breeze switch are now
    passed to the pip install command when airflow is installed via
    the --install-airflow-version or --install-airflow-reference switch.
    
    (cherry picked from commit 26c685425996a2e8127d349a7b98ec92c74ee3f2)
---
 .github/workflows/ci.yml                           |  2 +-
 breeze                                             | 10 ++---
 .../ci/{libraries => docker-compose}/_docker.env   | 52 ++++++++++++++++------
 scripts/ci/docker-compose/base.yml                 | 43 ++----------------
 scripts/ci/docker-compose/local-prod.yml           |  9 +---
 scripts/ci/libraries/_initialization.sh            |  5 +--
 scripts/ci/static_checks/check_license.sh          |  2 +-
 scripts/ci/tools/ci_clear_tmp.sh                   |  2 +-
 scripts/ci/tools/ci_fix_ownership.sh               |  2 +-
 scripts/in_container/_in_container_utils.sh        |  7 ---
 scripts/in_container/entrypoint_ci.sh              | 12 ++---
 11 files changed, 58 insertions(+), 88 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bb06e0d..33fb5d4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -207,6 +207,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [build-info, ci-images]
     env:
+      SKIP: "pylint"
       MOUNT_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'false'
@@ -673,7 +674,6 @@ jobs:
       BACKEND: postgres
       RUN_TESTS: "true"
       RUNTIME: "kubernetes"
-      ENABLE_KIND_CLUSTER: "true"
       PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}"
       KUBERNETES_MODE: "${{ matrix.kubernetes-mode }}"
       KUBERNETES_VERSION: "${{ matrix.kubernetes-version }}"
diff --git a/breeze b/breeze
index 2565ad5..f6a45a5 100755
--- a/breeze
+++ b/breeze
@@ -58,7 +58,6 @@ export EXTRA_STATIC_CHECK_OPTIONS
 #    SCREEN_WIDTH
 #    MOUNT_LOCAL_SOURCES
 #    FORCE_PULL_IMAGES
-#    ENABLE_KIND_CLUSTER
 #    FORWARD_CREDENTIALS
 #    DB_RESET
 #    START_AIRFLOW
@@ -101,9 +100,6 @@ function breeze::setup_default_breeze_constants() {
     # This can be overridden by '--force-pull-images' flag
     export FORCE_PULL_IMAGES="false"
 
-    # Do not enable Kind Kubernetes cluster by default
-    export ENABLE_KIND_CLUSTER="false"
-
     # Forward common host credentials to docker (gcloud, aws etc.).
     export FORWARD_CREDENTIALS="false"
 
@@ -824,7 +820,9 @@ function breeze::parse_arguments() {
             INSTALL_AIRFLOW_VERSION="${2}"
             # Reference is mutually exclusive with version
             INSTALL_AIRFLOW_REFERENCE=""
-            echo "Installs version of Airflow: ${INSTALL_AIRFLOW_VERSION}"
+            # Skip mounting local sources when airflow is installed from remote
+            INSTALL_PROVIDERS_FROM_SOURCES="false"
+            echo "Installs version of Airflow: ${INSTALL_AIRFLOW_VERSION} and skip mounting sources"
             echo
             shift 2
             ;;
@@ -832,6 +830,8 @@ function breeze::parse_arguments() {
             INSTALL_AIRFLOW_REFERENCE="${2}"
             # Reference is mutually exclusive with version
             INSTALL_AIRFLOW_VERSION=""
+            # Skip mounting local sources when airflow is installed from remote
+            INSTALL_PROVIDERS_FROM_SOURCES="false"
             echo "Installs Airflow from reference: ${INSTALL_AIRFLOW_REFERENCE}"
             echo
             shift 2
diff --git a/scripts/ci/libraries/_docker.env b/scripts/ci/docker-compose/_docker.env
similarity index 76%
rename from scripts/ci/libraries/_docker.env
rename to scripts/ci/docker-compose/_docker.env
index 6bc53dc..1ab933c 100644
--- a/scripts/ci/libraries/_docker.env
+++ b/scripts/ci/docker-compose/_docker.env
@@ -14,25 +14,49 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-INSTALL_AIRFLOW_VERSION
-PYTHONDONTWRITEBYTECODE
-VERBOSE
-VERBOSE_COMMANDS
+AIRFLOW_CI_IMAGE
+AIRFLOW_EXTRAS
+BACKEND
+BACKPORT_PACKAGES
+BREEZE
+CI
+CI_BUILD_ID
+CI_JOB_ID
+CI_EVENT_TYPE
+CI_TARGET_REPO
+CI_TARGET_BRANCH
+COMMIT_SHA
+DB_RESET
+DEFAULT_CONSTRAINTS_BRANCH
+DISABLE_RBAC
+ENABLED_INTEGRATIONS
+ENABLED_SYSTEMS
+GITHUB_REGISTRY_PULL_IMAGE_TAG
 HOST_USER_ID
 HOST_GROUP_ID
 HOST_OS
 HOST_HOME
 HOST_AIRFLOW_SOURCES
-PYTHON_MAJOR_MINOR_VERSION
-BACKEND
-VERSION_SUFFIX_FOR_PYPI
-VERSION_SUFFIX_FOR_SVN
-PRINT_INFO_FROM_SCRIPTS
-CI
+INIT_SCRIPT_FILE
+INSTALL_AIRFLOW_VERSION
+INSTALL_PROVIDERS_FROM_SOURCES
+INSTALL_PACKAGES_FROM_DIST
+ISSUE_ID
 LOAD_DEFAULT_CONNECTIONS
 LOAD_EXAMPLES
-DEFAULT_CONSTRAINTS_BRANCH
-GITHUB_REGISTRY_PULL_IMAGE_TAG
-POSTGRES_VERSION
 MYSQL_VERSION
-DISABLE_RBAC
+NUM_RUNS
+PACKAGE_FORMAT
+POSTGRES_VERSION
+PRINT_INFO_FROM_SCRIPTS
+PYTHONDONTWRITEBYTECODE
+PYTHON_MAJOR_MINOR_VERSION
+RUN_TESTS
+RUN_INTEGRATION_TESTS
+RUN_SYSTEM_TESTS
+START_AIRFLOW
+TEST_TYPE
+VERBOSE
+VERBOSE_COMMANDS
+VERSION_SUFFIX_FOR_PYPI
+VERSION_SUFFIX_FOR_SVN
diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index 96bf0f6..f4f9851 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -24,47 +24,10 @@ services:
       - ADDITIONAL_PATH=~/.local/bin
       - CELERY_BROKER_URLS=amqp://guest:guest@rabbitmq:5672,redis://redis:6379/0
       - KUBECONFIG=/files/.kube/config
-      - BACKEND
-      - CI
-      - CI_BUILD_ID
-      - CI_JOB_ID
-      - CI_EVENT_TYPE
-      - CI_TARGET_REPO
-      - CI_TARGET_BRANCH
-      - COMMIT_SHA
-      - RUN_TESTS
-      - VERBOSE
-      - VEROSE_COMMANDS
-      - AIRFLOW_CI_IMAGE
-      - ENABLE_KIND_CLUSTER
-      - ENABLED_INTEGRATIONS
-      - RUN_INTEGRATION_TESTS
-      - TEST_TYPE
-      - GITHUB_TOKEN
-      - GITHUB_REPOSITORY
-      - ISSUE_ID
-      - NUM_RUNS
-      - BREEZE
-      - INSTALL_AIRFLOW_VERSION
-      - DB_RESET
-      - START_AIRFLOW
-      - LOAD_EXAMPLES
-      - LOAD_DEFAULT_CONNECTIONS
-      - INSTALL_WHEELS
-      - DISABLE_RBAC
-      - ENABLED_SYSTEMS
-      - RUN_SYSTEM_TESTS
-      - PYTHON_MAJOR_MINOR_VERSION
-      - HOST_USER_ID
-      - HOST_GROUP_ID
-      - HOST_HOME=${HOME}
       - HOST_AIRFLOW_SOURCES=${AIRFLOW_SOURCES}
-      - HOST_OS
-      - PYTHONDONTWRITEBYTECODE
-      - INIT_SCRIPT_FILE
-      - GITHUB_REGISTRY_PULL_IMAGE_TAG
-      - POSTGRES_VERSION
-      - MYSQL_VERSION
+      - HOST_HOME=${HOME}
+    env_file:
+      - _docker.env
     volumes:
       # Pass docker to inside of the container so that Kind and Moto tests can use it.
       - /var/run/docker.sock:/var/run/docker.sock
diff --git a/scripts/ci/docker-compose/local-prod.yml b/scripts/ci/docker-compose/local-prod.yml
index 5a4bc8a..79476d0 100644
--- a/scripts/ci/docker-compose/local-prod.yml
+++ b/scripts/ci/docker-compose/local-prod.yml
@@ -29,16 +29,9 @@ services:
       - ../../../.github:/opt/airflow/.github:cached
       - ../../../.inputrc:/root/.inputrc:cached
       - ../../../.kube:/root/.kube:cached
-      - ../../../dist:/dist:cached
       - ../../../scripts/in_container/prod/entrypoint_prod.sh:/entrypoint:cached
       - ../../../setup.cfg:/opt/airflow/setup.cfg:cached
       - ../../../setup.py:/opt/airflow/setup.py:cached
       - ../../../tests:/opt/airflow/tests:cached
       - ../../../tmp:/tmp:cached
-    environment:
-      - HOST_USER_ID
-      - HOST_GROUP_ID
-      - HOST_HOME=${HOME}
-      - HOST_AIRFLOW_SOURCES=${AIRFLOW_SOURCES}
-      - HOST_OS
-      - PYTHONDONTWRITEBYTECODE
+      - ../../../metastore_browser:/opt/airflow/metastore_browser:cached
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index ff7abe9..50cdbf0 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -258,7 +258,7 @@ function initialization::initialize_mount_variables() {
 
     EXTRA_DOCKER_FLAGS+=(
         "--rm"
-        "--env-file" "${AIRFLOW_SOURCES}/scripts/ci/libraries/_docker.env"
+        "--env-file" "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env"
     )
     export EXTRA_DOCKER_FLAGS
 }
@@ -404,8 +404,6 @@ function initialization::initialize_version_suffixes_for_package_building() {
 
 # Determine versions of kubernetes cluster and tools used
 function initialization::initialize_kubernetes_variables() {
-    # By default we assume the kubernetes cluster is not being started
-    export ENABLE_KIND_CLUSTER=${ENABLE_KIND_CLUSTER:="false"}
     # Currently supported versions of Kubernetes
     CURRENT_KUBERNETES_VERSIONS+=("v1.18.6" "v1.17.5" "v1.16.9")
     export CURRENT_KUBERNETES_VERSIONS
@@ -679,7 +677,6 @@ function initialization::make_constants_read_only() {
     readonly HOST_HOME
     readonly HOST_OS
 
-    readonly ENABLE_KIND_CLUSTER
     readonly KUBERNETES_MODE
     readonly KUBERNETES_VERSION
     readonly KIND_VERSION
diff --git a/scripts/ci/static_checks/check_license.sh b/scripts/ci/static_checks/check_license.sh
index a185a5a..03858e3 100755
--- a/scripts/ci/static_checks/check_license.sh
+++ b/scripts/ci/static_checks/check_license.sh
@@ -33,7 +33,7 @@ function run_check_license() {
     # We mount ALL airflow files for the licence check. We want to check them all!
     if ! docker run -v "${AIRFLOW_SOURCES}:/opt/airflow" -t \
             --user "$(id -ur):$(id -gr)" \
-            --rm --env-file "${AIRFLOW_SOURCES}/scripts/ci/libraries/_docker.env" \
+            --rm --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
             apache/airflow:apache-rat-2020.07.10-0.13 \
             --exclude-file /opt/airflow/.rat-excludes \
             --d /opt/airflow | tee "${AIRFLOW_SOURCES}/logs/rat-results.txt" ; then
diff --git a/scripts/ci/tools/ci_clear_tmp.sh b/scripts/ci/tools/ci_clear_tmp.sh
index 1806c96..d367967 100755
--- a/scripts/ci/tools/ci_clear_tmp.sh
+++ b/scripts/ci/tools/ci_clear_tmp.sh
@@ -29,6 +29,6 @@ read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker
 
 docker run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
     --rm \
-    --env-file "${AIRFLOW_SOURCES}/scripts/ci/libraries/_docker.env" \
+    --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
     "${AIRFLOW_CI_IMAGE}" \
     -c /opt/airflow/scripts/in_container/run_clear_tmp.sh
diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/ci_fix_ownership.sh
index 2e3a909..2d57d65 100755
--- a/scripts/ci/tools/ci_fix_ownership.sh
+++ b/scripts/ci/tools/ci_fix_ownership.sh
@@ -35,6 +35,6 @@ read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker
 
 docker run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
     --rm \
-    --env-file "${AIRFLOW_SOURCES}/scripts/ci/libraries/_docker.env" \
+    --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
     "${AIRFLOW_CI_IMAGE}" \
     -c /opt/airflow/scripts/in_container/run_fix_ownership.sh
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index e2410fd..1dbcdd4 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -276,13 +276,6 @@ function uninstall_airflow_and_providers() {
     uninstall_airflow
 }
 
-function install_all_airflow_dependencies() {
-    echo
-    echo "Installing dependencies from 'all' extras"
-    echo
-    pip install ".[all]" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
-}
-
 function install_released_airflow_version() {
     local version="${1}"
     local extras="${2}"
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 55962aa..ba9c2ee 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -108,23 +108,23 @@ elif [[ ${INSTALL_AIRFLOW_VERSION} == "none"  ]]; then
     uninstall_airflow_and_providers
 elif [[ ${INSTALL_AIRFLOW_VERSION} == "wheel"  ]]; then
     echo
-    echo "Install airflow from wheel package with [all] extras but uninstalling providers."
+    echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
     echo
     uninstall_airflow_and_providers
-    install_airflow_from_wheel "[all]"
+    install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]"
     uninstall_providers
 elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist"  ]]; then
     echo
-    echo "Install airflow from sdist package with [all] extras but uninstalling providers."
+    echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
     echo
     uninstall_airflow_and_providers
-    install_airflow_from_sdist "[all]"
+    install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]"
     uninstall_providers
 else
     echo
-    echo "Install airflow from PyPI including [all] extras"
+    echo "Install airflow from PyPI including [${AIRFLOW_EXTRAS}] extras"
     echo
-    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[all]"
+    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[${AIRFLOW_EXTRAS}]"
 fi
 if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then
     echo
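
With this change, the extras chosen on the Breeze command line flow through to
the pip install that runs inside the container. A hypothetical invocation,
using the switches named in the commit message:

    ./breeze --install-airflow-version 1.10.14 --extras "gcp,statsd"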


[airflow] 33/44: Default python version is used when building image

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f5efce7b6a2c065ae9b60bf5429ae555523f4f04
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 23 15:46:35 2020 +0100

    Default python version is used when building image
    
    For the image build, the python version is passed via
    PYTHON_MAJOR_MINOR_VERSION, but part of the build (preparing the
    airflow package) uses the python installed on the host.

    On Master/2.0 this happens to be the same version as the image,
    but it should be unified (and on 1.10, trying to build a 2.7
    image would fail).
    
    (cherry picked from commit ba1111a6cc3906480394b22536d5adf8a3846c82)
---
 .github/workflows/build-images-workflow-run.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index 93b2176..ee9c48c 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -327,7 +327,7 @@ jobs:
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
-          python-version: ${{  matrix.python-version }}
+          python-version: 3.6
         if: steps.defaults.outputs.proceed == 'true'
       - name: >
           Override "scripts/ci" with the "${{ needs.cancel-workflow-runs.outputs.targetBranch }}" branch


[airflow] 17/44: Update Dockerfile (#12987)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit bfde73556963a0e53e33bba1acdbdf8bb0b626d5
Author: jao6693 <fb...@gmail.com>
AuthorDate: Thu Dec 10 21:01:49 2020 +0100

    Update Dockerfile (#12987)
    
    Fix permission issue in Azure DevOps when running the script install_mysql.sh, which prevents the build from succeeding:
    
    /bin/bash: ./scripts/docker/install_mysql.sh: Permission denied
    The command '/bin/bash -o pipefail -e -u -x -c ./scripts/docker/install_mysql.sh dev' returned a non-zero code: 126
    ##[error]The command '/bin/bash -o pipefail -e -u -x -c ./scripts/docker/install_mysql.sh dev' returned a non-zero code: 126
    ##[error]The process '/usr/bin/docker' failed with exit code 126
    
    (cherry picked from commit 2ec03cd9267f1cfd7fa3fa437ffad7deee3241e8)
---
 Dockerfile | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index eaac1e1..23a9915 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -144,9 +144,10 @@ RUN mkdir -pv /usr/share/man/man1 \
 ARG INSTALL_MYSQL_CLIENT="true"
 ENV INSTALL_MYSQL_CLIENT=${INSTALL_MYSQL_CLIENT}
 
-COPY scripts/docker scripts/docker
+COPY scripts/docker /scripts/docker
 COPY docker-context-files /docker-context-files
-
+# fix permission issue in Azure DevOps when running the script
+RUN chmod a+x /scripts/docker/install_mysql.sh
 RUN ./scripts/docker/install_mysql.sh dev
 
 ARG AIRFLOW_REPO=apache/airflow
@@ -372,7 +373,9 @@ RUN mkdir -pv /usr/share/man/man1 \
 ARG INSTALL_MYSQL_CLIENT="true"
 ENV INSTALL_MYSQL_CLIENT=${INSTALL_MYSQL_CLIENT}
 
-COPY scripts/docker scripts/docker
+COPY scripts/docker /scripts/docker
+# fix permission issue in Azure DevOps when running the script
+RUN chmod a+x /scripts/docker/install_mysql.sh
 RUN ./scripts/docker/install_mysql.sh prod
 
 ENV AIRFLOW_UID=${AIRFLOW_UID}


[airflow] 28/44: Skip identity pre-commit in ci and always display it first

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 408ed13e35a90b147e633b21ae9e781d3635fa8e
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 16 13:17:50 2020 +0100

    Skip identity pre-commit in ci and always display it first
    
    (cherry picked from commit 76f2215b95ab8da0d3c4e821138b6dd4275b7c79)
---
 .github/workflows/ci.yml | 4 ++--
 .pre-commit-config.yaml  | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 33fb5d4..a449d2c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -207,7 +207,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [build-info, ci-images]
     env:
-      SKIP: "pylint"
+      SKIP: "pylint,identity"
       MOUNT_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'false'
@@ -243,7 +243,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [build-info]
     env:
-      SKIP: "build,mypy,flake8,pylint,bats-in-container-tests"
+      SKIP: "build,mypy,flake8,pylint,bats-in-container-tests,identity"
       MOUNT_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'true'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2e9edda..2e6207d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -21,6 +21,10 @@ default_language_version:
   python: python3
 minimum_pre_commit_version: "1.20.0"
 repos:
+  - repo: meta
+    hooks:
+      - id: identity
+      - id: check-hooks-apply
   - repo: https://github.com/Lucas-C/pre-commit-hooks
     rev: v1.1.9
     hooks:
@@ -146,10 +150,6 @@ repos:
         args:
           - "--maxlevel"
           - "2"
-  - repo: meta
-    hooks:
-      - id: identity
-      - id: check-hooks-apply
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v3.3.0
     hooks:
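
The SKIP variable used in ci.yml above is standard pre-commit behaviour: any
hook id listed in it is skipped. The same effect can be reproduced locally,
for example:

    SKIP="pylint,identity" pre-commit run --all-files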


[airflow] 10/44: Clarifies version args for installing 1.10 in Docker (#12875)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 726e0e148fa6f5d959d436892e53b8be5a3ad22d
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Mon Dec 7 16:08:55 2020 +0100

    Clarifies version args for installing 1.10 in Docker (#12875)
    
    This change clarifies that AIRFLOW_VERSION should be passed
    together with AIRFLOW_INSTALL_VERSION when the Docker image
    is built.
    
    Fixes #8612
    
    (cherry picked from commit 1dcef78b12ac4746345f4630966e0dc4e32e41da)
---
 IMAGES.rst                            |  2 ++
 docs/production-deployment.rst        | 10 +++++++++-
 scripts/ci/libraries/_build_images.sh |  1 +
 3 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/IMAGES.rst b/IMAGES.rst
index 6a04428..513f8cf 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -300,6 +300,7 @@ additional apt dev and runtime dependencies.
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
@@ -334,6 +335,7 @@ based on example in `this comment <https://github.com/apache/airflow/issues/8605
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
diff --git a/docs/production-deployment.rst b/docs/production-deployment.rst
index 7964b34..335e713 100644
--- a/docs/production-deployment.rst
+++ b/docs/production-deployment.rst
@@ -134,6 +134,7 @@ additional apt dev and runtime dependencies.
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
@@ -166,6 +167,7 @@ based on example in `this comment <https://github.com/apache/airflow/issues/8605
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
@@ -245,6 +247,7 @@ or
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
@@ -516,7 +519,9 @@ production image. There are three types of build:
 | ``AIRFLOW_INSTALL_VERSION``       | Optional - might be used for      |
 |                                   | package installation case to      |
 |                                   | set Airflow version for example   |
-|                                   | "==1.10.14"                       |
+|                                   | "==1.10.14". Remember to also     |
+|                                   | Set ``AIRFLOW_VERSION``           |
+|                                   | when you use it.                  |
 +-----------------------------------+-----------------------------------+
 | ``AIRFLOW_CONSTRAINTS_REFERENCE`` | reference (branch or tag) from    |
 |                                   | GitHub where constraints file     |
@@ -583,6 +588,7 @@ of v1-10-test branch.
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1.10.14" \
@@ -598,6 +604,7 @@ additional python dependencies and pre-installed pip dependencies from 1.10.14 t
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1.10.14" \
@@ -615,6 +622,7 @@ additional apt dev and runtime dependencies.
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
     --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 17889ce..d054f15 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -647,6 +647,7 @@ function build_images::prepare_prod_build() {
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
             "--build-arg" "AIRFLOW_INSTALL_SOURCES=apache-airflow"
             "--build-arg" "AIRFLOW_INSTALL_VERSION===${INSTALL_AIRFLOW_VERSION}"
+            "--build-arg" "AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION}"
         )
         export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
         if [[ ${AIRFLOW_VERSION} == "1.10.2" || ${AIRFLOW_VERSION} == "1.10.1" ]]; then
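
Condensed to just the arguments this commit is about (a sketch; the full
invocations in the documentation above include several more build args), the
point is that the bare version and the pip version specifier travel together:

    docker build . \
      --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
      --build-arg AIRFLOW_VERSION="1.10.14" \
      --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14"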


[airflow] 36/44: Use new logging options on values.yaml (#13173)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e328ae44961d5c5d2360628f0e60fc5e99ae5dea
Author: Flávio de Assis <34...@users.noreply.github.com>
AuthorDate: Sat Dec 19 22:14:35 2020 -0300

    Use new logging options on values.yaml (#13173)
    
    (cherry picked from commit 23a47879ababe76f6cf9034a2bae055b2a91bf1f)
---
 chart/values.yaml | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/chart/values.yaml b/chart/values.yaml
index cb605c3..d84a785 100644
--- a/chart/values.yaml
+++ b/chart/values.yaml
@@ -643,13 +643,16 @@ config:
   core:
     dags_folder: '{{ include "airflow_dags" . }}'
     load_examples: 'False'
-    colored_console_log: 'False'
     executor: '{{ .Values.executor }}'
+    # For Airflow 1.10, backward compatibility
+    colored_console_log: 'False'
     remote_logging: '{{- ternary "True" "False" .Values.elasticsearch.enabled }}'
   # Authentication backend used for the experimental API
   api:
     auth_backend: airflow.api.auth.backend.deny_all
   logging:
+    remote_logging: '{{- ternary "True" "False" .Values.elasticsearch.enabled }}'
+    colored_console_log: 'False'
     logging_level: DEBUG
   metrics:
     statsd_on: '{{ ternary "True" "False" .Values.statsd.enabled }}'
@@ -660,10 +663,8 @@ config:
     enable_proxy_fix: 'True'
     expose_config: 'True'
     rbac: 'True'
-
   celery:
     default_queue: celery
-
   scheduler:
     scheduler_heartbeat_sec: 5
     # For Airflow 1.10, backward compatibility
@@ -671,7 +672,6 @@ config:
     statsd_port: 9125
     statsd_prefix: airflow
     statsd_host: '{{ printf "%s-statsd" .Release.Name }}'
-
     # Restart Scheduler every 41460 seconds (11 hours 31 minutes)
     # The odd time is chosen so it is not always restarting on the same "hour" boundary
     run_duration: 41460
@@ -682,13 +682,11 @@ config:
     max_retries: 3
     timeout: 30
     retry_timeout: 'True'
-
   kerberos:
     keytab: '{{ .Values.kerberos.keytabPath }}'
     reinit_frequency: '{{ .Values.kerberos.reinitFrequency }}'
     principal: '{{ .Values.kerberos.principal }}'
     ccache: '{{ .Values.kerberos.ccacheMountPath }}/{{ .Values.kerberos.ccacheFileName }}'
-
   kubernetes:
     namespace: '{{ .Release.Namespace }}'
     airflow_configmap: '{{ include "airflow_config" . }}'
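
To confirm that the rendered configuration now carries remote_logging and
colored_console_log in the new [logging] section as well as the legacy [core]
one, the chart can be rendered locally. A sketch, assuming a checkout
containing this chart:

    helm template ./chart --set elasticsearch.enabled=true \
        | grep -E "remote_logging|colored_console_log"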


[airflow] 18/44: Add changes from 1.10.14 (#12993)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 17d53ab7bc0d7d555479a0c96961cd714a82d16c
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Thu Dec 10 20:52:29 2020 +0000

    Add changes from 1.10.14 (#12993)
    
    This commit adds the Changelog and Updating.md, and replaces
    1.10.13 with 1.10.14 across the codebase.
    
    (cherry picked from commit 969d3ea4f86a39807d696ca6f47c7cdf23526333)
---
 .github/workflows/ci.yml |   1 +
 BREEZE.rst               | 117 ++++++++++++++++++++++++++++++++++++++++++++---
 CONTRIBUTING.rst         |  12 -----
 IMAGES.rst               |  67 +++++++++++++++++----------
 README.md                |  27 +++++++----
 breeze                   |   3 +-
 breeze-complete          |  14 +++---
 7 files changed, 181 insertions(+), 60 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ccfd50f..74670d3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -289,6 +289,7 @@ jobs:
         with:
           name: airflow-documentation
           path: "./files/documentation"
+          retention-days: 7
 
   tests-helm:
     timeout-minutes: 20
diff --git a/BREEZE.rst b/BREEZE.rst
index 43705f9..1ed3cfe 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -541,6 +541,74 @@ dependencies). If you work offline and do not want to rebuild the images when ne
 ``FORCE_ANSWER_TO_QUESTIONS`` variable to ``no`` as described in the
 `Setting default behaviour for user interaction <#setting-default-behaviour-for-user-interaction>`_ section.
 
+Preparing packages
+------------------
+
+Breeze can also be used to prepare airflow packages - both "apache-airflow" main package and
+provider packages.
+
+You can read more about testing provider packages in
+`TESTING.rst <TESTING.rst#running-tests-with-packages>`_
+
+There are several commands that you can run in Breeze to manage and build packages:
+
+* preparing Provider Readme files
+* preparing Airflow packages
+* preparing Provider packages
+
+Preparing provider readme files is part of the release procedure by the release managers
+and it is described in detail in `dev <dev/README.md>`_ .
+
+You can prepare provider packages - by default regular provider packages are prepared, but with
+``--backport`` flag you can prepare backport packages.
+
+The packages are prepared in the ``dist`` folder. Note that this command cleans up the ``dist`` folder
+before running, so run it before generating the airflow package described below; otherwise the airflow
+package would be removed.
+
+The below example builds provider packages in the wheel format.
+
+.. code-block:: bash
+
+     ./breeze prepare-provider-packages
+
+If you run this command without listing packages, all provider packages are prepared; you can,
+however, specify the providers that you would like to build. By default only ``wheel`` packages
+are prepared, but you can change that with the optional --package-format flag.
+
+
+.. code-block:: bash
+
+     ./breeze prepare-provider-packages --package-format=both google amazon
+
+You can also prepare backport provider packages if you specify the ``--backport`` flag. You can read more
+about backport packages in `dev <dev/README.md>`_
+
+.. code-block:: bash
+
+     ./breeze prepare-provider-packages --backports --package-format=both google amazon
+
+You can see all providers available by running this command:
+
+.. code-block:: bash
+
+     ./breeze prepare-provider-packages -- --help
+
+
+You can also prepare airflow packages using breeze:
+
+.. code-block:: bash
+
+     ./breeze prepare-airflow-packages
+
+This prepares airflow .whl package in the dist folder.
+
+Again, you can specify optional ``--package-format`` flag to build airflow packages.
+
+.. code-block:: bash
+
+     ./breeze prepare-airflow-packages --package-format=both
+
+
 Building Production images
 --------------------------
 
@@ -1067,6 +1135,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
     generate-constraints                     Generates pinned constraint files
     push-image                               Pushes images to registry
     initialize-local-virtualenv              Initializes local virtualenv
+    prepare-airflow-packages                 Prepares airflow packages
     setup-autocomplete                       Sets up autocomplete for breeze
     start-airflow                            Starts Scheduler and Webserver and enters the shell
     stop                                     Stops the docker-compose environment
@@ -1142,7 +1211,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
   Detailed usage for command: build-docs
 
 
-  breeze build-docs [-- <EXTRA_ARGS>]
+  breeze build-docs
 
         Builds Airflow documentation. The documentation is build inside docker container - to
         maintain the same build environment for everyone. Appropriate sources are mapped from
@@ -1150,8 +1219,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
         is generated ('docs/_build') are also mounted to the container - this way results of
         the documentation build is available in the host.
 
-        The possible extra args are: --docs-only, --spellcheck-only, --help
-
 
   ####################################################################################################
 
@@ -1187,8 +1254,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 1.10.8 1.10.7 1.10.6 1.10.5 1.10.4 1.10.3
-                 1.10.2 wheel none
+                 1.10.14 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 wheel none
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
@@ -1553,6 +1619,44 @@ This is the current syntax for  `./breeze <./breeze>`_:
   ####################################################################################################
 
 
+  Detailed usage for command: prepare-airflow-packages
+
+
+  breeze prepare-airflow-packages [FLAGS]
+
+        Prepares airflow packages (sdist and wheel) in dist folder. Note that
+        prepare-provider-packages command cleans up the dist folder, so if you want also
+        to generate provider packages, make sure you run prepare-provider-packages first,
+        and prepare-airflow-packages second.
+
+        General form:
+
+        'breeze prepare-airflow-packages
+
+  Flags:
+
+  --package-format PACKAGE_FORMAT
+
+          Chooses format of packages to prepare.
+
+          One of:
+
+                 wheel,sdist,both
+
+          Default: 
+
+  -v, --verbose
+          Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
+          debugging - when you run breeze with --verbose flags you will be able to see the commands
+          executed under the hood and copy&paste them to your terminal to debug them more easily.
+
+          Note that you can further increase verbosity and see all the commands executed by breeze
+          by running 'export VERBOSE_COMMANDS="true"' before running breeze.
+
+
+  ####################################################################################################
+
+
   Detailed usage for command: setup-autocomplete
 
 
@@ -2105,8 +2209,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 1.10.8 1.10.7 1.10.6 1.10.5 1.10.4 1.10.3
-                 1.10.2 wheel none
+                 1.10.14 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 wheel none
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 8c4bf35..bc5661c 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -818,18 +818,6 @@ You can only run one of the steps via ``--spellcheck-only`` or ``--docs-only``.
 
     ./breeze build-docs
 
-or just to run spell-check
-
-.. code-block:: bash
-
-     ./breeze build-docs -- --spellcheck-only
-
-or just to run documentation building
-
-.. code-block:: bash
-
-     ./breeze build-docs
-
 Also documentation is available as downloadable artifact in GitHub Actions after the CI builds your PR.
 
 **Known issues:**
diff --git a/IMAGES.rst b/IMAGES.rst
index 513f8cf..94ad6fd 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -24,8 +24,8 @@ Airflow has two images (build from Dockerfiles):
 
   * Production image (Dockerfile) - that can be used to build your own production-ready Airflow installation
     You can read more about building and using the production image in the
-    `Production Deployments <docs/production-deployment.rst>`_ document. The image is built using
-    `Dockerfile <Dockerfile>`_
+    `Production Deployments <https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html>`_ document.
+    The image is built using `Dockerfile <Dockerfile>`_
 
   * CI image (Dockerfile.ci) - used for running tests and local development. The image is built using
     `Dockerfile.ci <Dockerfile.ci>`_
@@ -64,7 +64,7 @@ The easiest way to build those images is to use `<BREEZE.rst>`_.
 
 Note! Breeze by default builds production image from local sources. You can change it's behaviour by
 providing ``--install-airflow-version`` parameter, where you can specify the
-tag/branch used to download Airflow package from in github repository. You can
+tag/branch used to download Airflow package from in GitHub repository. You can
 also change the repository itself by adding ``--dockerhub-user`` and ``--dockerhub-repo`` flag values.
 
 You can build the CI image using this command:
@@ -117,6 +117,16 @@ parameter to Breeze:
   ./breeze build-image --python 3.7 --additional-extras=presto \
       --production-image --install-airflow-version=1.10.14
 
+
+.. note::
+
+   In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
+   does not yet work with Apache Airflow and, depending on your choice of extras, might lead to errors
+   during installation. To install Airflow you need to either downgrade pip to version 20.2.4 with
+   ``pip install --upgrade pip==20.2.4`` or, if you use pip 20.3, add the option
+   ``--use-deprecated legacy-resolver`` to your pip install command.
+
+
 This will build the image using command similar to:
 
 .. code-block:: bash
@@ -125,14 +135,6 @@ This will build the image using command similar to:
       apache-airflow[async,aws,azure,celery,dask,elasticsearch,gcp,kubernetes,mysql,postgres,redis,slack,ssh,statsd,virtualenv,presto]==1.10.14 \
       --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.14/constraints-3.6.txt"
 
-.. note::
-   On 30th of November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver.
-   This resolver does not yet work with Apache Airflow and might leads to errors in installation -
-   depends on your choice of extras. In order to install Airflow you need to either downgrade
-   pip to version 20.2.4 ``pip upgrade --pip==20.2.4`` or, in case you use Pip 20.3, you need to add option
-   ``--use-deprecated legacy-resolver`` to your pip install command.
-
-
 You can also build production images from specific Git version via providing ``--install-airflow-reference``
 parameter to Breeze (this time constraints are taken from the ``constraints-master`` branch which is the
 HEAD of development for constraints):
@@ -142,6 +144,15 @@ HEAD of development for constraints):
     pip install "https://github.com/apache/airflow/archive/<tag>.tar.gz#egg=apache-airflow" \
       --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
 
+You can also skip installing airflow by providing ``--install-airflow-version none`` parameter to Breeze:
+
+.. code-block:: bash
+
+  ./breeze build-image --python 3.7 --additional-extras=presto \
+      --production-image --install-airflow-version=none --install-from-local-files-when-building
+
+In this case you usually install airflow and all packages in ``docker-context-files`` folder.
+
 Using cache during builds
 =========================
 
@@ -276,8 +287,8 @@ production image from the local sources.
 
 The image is primarily optimised for size of the final image, but also for speed of rebuilds - the
 'airflow-build-image' segment uses the same technique as the CI builds for pre-installing PIP dependencies.
-It first pre-installs them from the right github branch and only after that final airflow installation is
-done from either local sources or remote location (PIP or github repository).
+It first pre-installs them from the right GitHub branch and only after that final airflow installation is
+done from either local sources or remote location (PIP or GitHub repository).
 
 Customizing the image
 .....................
@@ -409,7 +420,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 |                                          |                                          | file has to be in docker context so      |
 |                                          |                                          | it's best to place such file in          |
 |                                          |                                          | one of the folders included in           |
-|                                          |                                          | dockerignore                . for example in the        |
+|                                          |                                          | .dockerignore, for example in the        |
 |                                          |                                          | 'docker-context-files'. Note that the    |
 |                                          |                                          | location does not work for the first     |
 |                                          |                                          | stage of installation when the           |
@@ -418,21 +429,31 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 |                                          |                                          | set to true. Default location from       |
 |                                          |                                          | GitHub is used in this case.             |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_LOCAL_PIP_WHEELS``             | ``false``                                | If set to true, Airflow and it's         |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | reference (branch or tag) from GitHub    |
+|                                          |                                          | repository from which constraints are    |
+|                                          |                                          | used. By default it is set to            |
+|                                          |                                          | ``constraints-master`` but can be        |
+|                                          |                                          | ``constraints-1-10`` for 1.10.* versions |
+|                                          |                                          | or it could point to a specific version  |
+|                                          |                                          | for example ``constraints-1.10.12``      |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``INSTALL_FROM_DOCKER_CONTEXT_FILES``    | ``false``                                | If set to true, Airflow and its          |
 |                                          |                                          | dependencies are installed from locally  |
 |                                          |                                          | downloaded .whl files placed in the      |
 |                                          |                                          | ``docker-context-files``.                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_EXTRAS``                       | ``all``                                  | extras to install                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_AIRFLOW_VIA_PIP``              | ``false``                                | If set to true, Airflow is installed via |
-|                                          |                                          | pip install. if you want to install      |
+| ``INSTALL_FROM_PYPI``                    | ``true``                                 | If set to true, Airflow is installed     |
+|                                          |                                          | from PyPI. If you want to install        |
 |                                          |                                          | Airflow from externally provided binary  |
 |                                          |                                          | package you can set it to false, place   |
 |                                          |                                          | the package in ``docker-context-files``  |
-|                                          |                                          | and set ``AIRFLOW_LOCAL_PIP_WHEELS`` to  |
-|                                          |                                          | true. You have to also set to true the   |
+|                                          |                                          | and set                                  |
+|                                          |                                          | ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` to |
+|                                          |                                          | true. For this you have to also set the  |
 |                                          |                                          | ``AIRFLOW_PRE_CACHED_PIP_PACKAGES`` flag |
+|                                          |                                          | to false.                                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``true``                                 | Allows to pre-cache airflow PIP packages |
 |                                          |                                          | from the GitHub of Apache Airflow        |
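
For example, combining the arguments above to install Airflow from packages placed in
``docker-context-files`` rather than from PyPI might look like this (a sketch - the image
tag is illustrative and the packages are assumed to have been downloaded beforehand):

.. code-block:: bash

  docker build . \
      --build-arg INSTALL_FROM_PYPI="false" \
      --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \
      --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \
      --tag my-airflow-image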
@@ -530,7 +551,7 @@ Production images
 -----------------
 
 You can find details about using, building, extending and customising the production images in the
-`Latest documentation <https://airflow.readthedocs.io/en/latest/production-deployment.html>`_
+`Latest documentation <https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html>`_
 
 
 Image manifests
@@ -615,10 +636,8 @@ The entrypoint performs those operations:
 Using, customising, and extending the production image
 ======================================================
 
-You can read more about using, customising, and extending the production image in the documentation:
-
-* [Stable docs](https://airflow.apache.org/docs/stable/production-deployment.html)
-* [Latest docs from master branch](https://airflow.readthedocs.io/en/latest/production-deployment.html
+You can read more about using, customising, and extending the production image in the
+`documentation <https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html>`_.
 
 Alpha versions of 1.10.10 production-ready images
 =================================================
diff --git a/README.md b/README.md
index 79cceed..2d7edd6 100644
--- a/README.md
+++ b/README.md
@@ -82,18 +82,22 @@ Apache Airflow is tested with:
 | PostgreSQL   | 9.6, 10, 11, 12, 13       | 9.6, 10, 11, 12, 13      |
 | MySQL        | 5.7, 8                    | 5.6, 5.7                 |
 | SQLite       | latest stable             | latest stable            |
-| Kubernetes   | 1.16.2, 1.17.0            | 1.16.2, 1.17.0           |
+| Kubernetes   | 1.16.9, 1.17.5, 1.18.6    | 1.16.9, 1.17.5, 1.18.6   |
 
-**Note:**  SQLite is used primarily for development purpose.
+**Note:** MariaDB and MySQL 5.x are unable to run multiple schedulers, or have
+limitations doing so -- please see the "Scheduler" docs.
+
+**Note:** SQLite is used in Airflow tests. Do not use it in production.
 
 ### Additional notes on Python version requirements
 
 * Stable version [requires](https://github.com/apache/airflow/issues/8162) at least Python 3.5.3 when using Python 3
 
 ## Getting started
+
 Visit the official Airflow website documentation (latest **stable** release) for help with [installing Airflow](https://airflow.apache.org/installation.html), [getting started](https://airflow.apache.org/start.html), or walking through a more complete [tutorial](https://airflow.apache.org/tutorial.html).
 
-> Note: If you're looking for documentation for master branch (latest development branch): you can find it on [ReadTheDocs](https://airflow.readthedocs.io/en/latest/).
+> Note: If you're looking for documentation for the master branch (latest development branch), you can find it on [s.apache.org/airflow-docs](https://s.apache.org/airflow-docs/).
 
 For more information on Airflow's Roadmap or Airflow Improvement Proposals (AIPs), visit the [Airflow Wiki](https://cwiki.apache.org/confluence/display/AIRFLOW/Airflow+Home).
 
@@ -117,6 +121,15 @@ correct Airflow tag/version/branch and python versions in the URL.
 
 1. Installing just Airflow:
 
+NOTE!!!
+
+In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver
+does not yet work with Apache Airflow and might lead to errors during installation, depending on your
+choice of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
+(`pip install --upgrade pip==20.2.4`) or, in case you use pip 20.3, add the option
+`--use-deprecated legacy-resolver` to your pip install command.
+
+
 ```bash
 pip install apache-airflow==1.10.14 \
  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.14/constraints-3.7.txt"
@@ -130,14 +143,12 @@ depends on your choice of extras. In order to install Airflow you need to either
 pip to version 20.2.4 (`pip install --upgrade pip==20.2.4`) or, in case you use pip 20.3, add the option
 `--use-deprecated legacy-resolver` to your pip install command.
 
-
-2. Installing with extras (for example postgres,gcp)
 ```bash
-pip install apache-airflow[postgres,gcp]==1.10.14 \
+pip install apache-airflow[postgres,google]==1.10.14 \
  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.14/constraints-3.7.txt"
 ```
 
-For information on installing backport providers check https://airflow.readthedocs.io/en/latest/backport-providers.html.
+For information on installing backport providers check [docs/backport-providers.rst](docs/backport-providers.rst).
 
 ## Official source code
 
@@ -165,7 +176,7 @@ Those are - in the order of most common ways people install Airflow:
 - [Docker Images](https://hub.docker.com/repository/docker/apache/airflow) to install airflow via
   `docker` tool, use them in Kubernetes, Helm Charts, `docker-compose`, `docker swarm` etc. You can
   read more about using, customising, and extending the images in the
-  [Latest docs](https://airflow.readthedocs.io/en/latest/production-deployment.html), and
+  [Latest docs](https://airflow.apache.org/docs/apache-airflow/stable/production-deployment.html), and
   learn details on the internals in the [IMAGES.rst](IMAGES.rst) document.
 - [Tags in GitHub](https://github.com/apache/airflow/tags) to retrieve the git project sources that
   were used to generate official source packages via git
diff --git a/breeze b/breeze
index e9a9557..2565ad5 100755
--- a/breeze
+++ b/breeze
@@ -1567,7 +1567,7 @@ ${CMDNAME} exec [-- <EXTRA_ARGS>]
       webserver, workers, database console and interactive terminal.
 "
     export DETAILED_USAGE_BUILD_DOCS="
-${CMDNAME} build-docs [-- <EXTRA_ARGS>]
+${CMDNAME} build-docs
 
      Builds Airflow documentation. The documentation is built inside a Docker container - to
       maintain the same build environment for everyone. Appropriate sources are mapped from
@@ -1575,7 +1575,6 @@ ${CMDNAME} build-docs [-- <EXTRA_ARGS>]
      is generated ('docs/_build') are also mounted to the container - this way the results of
      the documentation build are available on the host.
 
-      The possible extra args are: --docs-only, --spellcheck-only, --help
 "
     readonly DETAILED_USAGE_BUILD_DOCS
     export DETAILED_USAGE_BUILD_IMAGE="
diff --git a/breeze-complete b/breeze-complete
index df81093..5502eec 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -34,6 +34,7 @@ _breeze_allowed_mysql_versions="5.6 5.7"
 _breeze_allowed_postgres_versions="9.6 10 11 12 13"
 _breeze_allowed_kind_operations="start stop restart status deploy test shell k9s"
 _breeze_allowed_test_types="All Core Integration Heisentests Postgres MySQL Helm"
+_breeze_allowed_package_formats="wheel sdist both"
 
 # shellcheck disable=SC2034
 {
@@ -46,21 +47,16 @@ _breeze_allowed_test_types="All Core Integration Heisentests Postgres MySQL Helm
     _breeze_default_postgres_version=$(echo "${_breeze_allowed_postgres_versions}" | awk '{print $1}')
     _breeze_default_mysql_version=$(echo "${_breeze_allowed_mysql_versions}" | awk '{print $1}')
     _breeze_default_test_type=$(echo "${_breeze_allowed_test_types}" | awk '{print $1}')
+    _breeze_default_package_format=$(echo "${_breeze_allowed_package_formats}" | awk '{print $1}')
 }
 
 _breeze_allowed_install_airflow_versions=$(cat <<-EOF
+1.10.14
 1.10.13
 1.10.12
 1.10.11
 1.10.10
 1.10.9
-1.10.8
-1.10.7
-1.10.6
-1.10.5
-1.10.4
-1.10.3
-1.10.2
 wheel
 none
 EOF
@@ -162,6 +158,7 @@ exec
 generate-constraints
 push-image
 initialize-local-virtualenv
+prepare-airflow-packages
 setup-autocomplete
 start-airflow
 stop
@@ -265,6 +262,9 @@ function breeze_complete::get_known_values_breeze() {
     --test-type)
         _breeze_known_values="${_breeze_allowed_test_types}"
         ;;
+    --package-format)
+        _breeze_known_values="${_breeze_allowed_package_formats}"
+        ;;
     *)
         _breeze_known_values=""
         ;;
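
With those completion values in place, the new command can be invoked like this (a sketch -
it assumes ``--package-format`` accepts the values from ``_breeze_allowed_package_formats``):

.. code-block:: bash

  ./breeze prepare-airflow-packages --package-format wheel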


[airflow] 02/44: Requirements should be removed. Sufficient time passed.

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit f9c5a7f2631371155a3cb5dec0e37b37117ec5da
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Sun Dec 13 12:32:46 2020 +0100

    Requirements should be removed. Sufficient time passed.
---
 requirements/REMOVE.md                  |  22 --
 requirements/requirements-python2.7.txt | 343 --------------------------------
 requirements/requirements-python3.5.txt | 326 ------------------------------
 requirements/requirements-python3.6.txt | 333 -------------------------------
 requirements/requirements-python3.7.txt | 330 ------------------------------
 requirements/requirements-python3.8.txt | 329 ------------------------------
 requirements/setup-2.7.md5              |   1 -
 requirements/setup-3.5.md5              |   1 -
 requirements/setup-3.6.md5              |   1 -
 requirements/setup-3.7.md5              |   1 -
 requirements/setup-3.8.md5              |   1 -
 11 files changed, 1688 deletions(-)

diff --git a/requirements/REMOVE.md b/requirements/REMOVE.md
deleted file mode 100644
index e5163fb..0000000
--- a/requirements/REMOVE.md
+++ /dev/null
@@ -1,22 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements.  See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership.  The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied.  See the License for the
- specific language governing permissions and limitations
- under the License.
- -->
-
-This directory should be removed as soon as we release Airflow 1.10.12
-and sufficient time passes for everyone to switch to new way of retrieving
-constraints.
diff --git a/requirements/requirements-python2.7.txt b/requirements/requirements-python2.7.txt
deleted file mode 100644
index 2dc0f9b..0000000
--- a/requirements/requirements-python2.7.txt
+++ /dev/null
@@ -1,343 +0,0 @@
-# Editable install with no version control (apache-airflow==1.10.11)
-Babel==2.8.0
-Flask-Admin==1.5.4
-Flask-AppBuilder==1.13.1
-Flask-Babel==0.12.2
-Flask-Bcrypt==0.7.1
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.24.1
-Flask-Login==0.4.1
-Flask-OpenID==1.2.5
-Flask-SQLAlchemy==2.4.4
-Flask-WTF==0.14.3
-Flask==1.1.2
-JPype1==0.7.1
-JayDeBeApi==1.2.3
-Jinja2==2.11.2
-Mako==1.1.3
-Markdown==2.6.11
-MarkupSafe==1.1.1
-PyHive==0.6.2
-PyJWT==1.7.1
-PyNaCl==1.4.0
-PySmbClient==0.1.5
-PyYAML==5.3.1
-Pygments==2.5.2
-SQLAlchemy-JSONField==0.8.0
-SQLAlchemy==1.3.18
-Sphinx==1.8.5
-Unidecode==1.1.1
-WTForms==2.3.1
-Werkzeug==0.16.1
-adal==1.2.4
-alabaster==0.7.12
-alembic==1.4.2
-amqp==2.6.0
-analytics-python==1.2.9
-ansiwrap==0.8.4
-apipkg==1.5
-apispec==2.0.2
-appdirs==1.4.4
-argcomplete==1.12.0
-asn1crypto==1.3.0
-aspy.yaml==1.3.0
-astroid==1.6.6
-atlasclient==1.0.0
-atomicwrites==1.4.0
-attrs==19.3.0
-aws-sam-translator==1.25.0
-aws-xray-sdk==2.6.0
-azure-common==1.1.25
-azure-cosmos==3.2.0
-azure-datalake-store==0.0.48
-azure-mgmt-containerinstance==1.5.0
-azure-mgmt-nspkg==3.0.2
-azure-mgmt-resource==10.1.0
-azure-nspkg==3.0.2
-azure-storage-blob==2.1.0
-azure-storage-common==2.1.0
-azure-storage-nspkg==3.1.0
-azure-storage==0.36.0
-backports-abc==0.5
-backports.functools-lru-cache==1.6.1
-backports.shutil-get-terminal-size==1.0.0
-backports.ssl-match-hostname==3.7.0.1
-backports.tempfile==1.0
-backports.weakref==1.0.post1
-bcrypt==3.1.7
-beautifulsoup4==4.7.1
-billiard==3.6.3.0
-bleach==3.1.5
-blinker==1.4
-boto3==1.14.25
-boto==2.49.0
-botocore==1.17.25
-cached-property==1.5.1
-cachetools==3.1.1
-cassandra-driver==3.20.2
-cattrs==1.0.0
-celery==4.4.6
-certifi==2020.6.20
-cffi==1.14.0
-cfgv==2.0.1
-cfn-lint==0.34.0
-cgroupspy==0.1.6
-chardet==3.0.4
-click==6.7
-cloudant==0.5.10
-colorama==0.4.3
-colorlog==4.0.2
-configparser==3.5.3
-contextdecorator==0.10.0
-contextlib2==0.6.0.post1
-cookies==2.2.1
-coverage==5.2
-croniter==0.3.34
-cryptography==3.0
-cx-Oracle==7.3.0
-datadog==0.38.0
-decorator==4.4.2
-defusedxml==0.6.0
-dill==0.3.2
-distlib==0.3.1
-dnspython==1.16.0
-docker-pycreds==0.4.0
-docker==3.7.3
-docopt==0.6.2
-docutils==0.16
-ecdsa==0.15
-elasticsearch-dsl==5.4.0
-elasticsearch==5.5.3
-email-validator==1.1.1
-entrypoints==0.3
-enum34==1.1.10
-execnet==1.7.1
-fastavro==0.23.6
-filelock==3.0.12
-flake8-colors==0.1.6
-flake8==3.8.3
-flaky==3.7.0
-flask-swagger==0.2.14
-flower==0.9.5
-freezegun==0.3.15
-funcsigs==1.0.2
-functools32==3.2.3.post2
-future-fstrings==1.2.0
-future==0.18.2
-futures==3.3.0
-gcsfs==0.2.3
-google-api-core==1.22.0
-google-api-python-client==1.10.0
-google-auth-httplib2==0.0.4
-google-auth-oauthlib==0.4.1
-google-auth==1.19.2
-google-cloud-bigquery==1.26.0
-google-cloud-bigtable==1.3.0
-google-cloud-container==1.0.1
-google-cloud-core==1.3.0
-google-cloud-dlp==1.0.0
-google-cloud-language==1.3.0
-google-cloud-secret-manager==1.0.0
-google-cloud-spanner==1.17.1
-google-cloud-speech==1.3.2
-google-cloud-storage==1.29.0
-google-cloud-texttospeech==1.0.1
-google-cloud-translate==2.0.1
-google-cloud-videointelligence==1.15.0
-google-cloud-vision==1.0.0
-google-resumable-media==0.5.1
-googleapis-common-protos==1.52.0
-graphviz==0.14.1
-grpc-google-iam-v1==0.12.3
-grpcio-gcp==0.2.2
-grpcio==1.30.0
-gunicorn==19.10.0
-hdfs==2.5.8
-hmsclient==0.1.1
-httplib2==0.18.1
-humanize==1.0.0
-hvac==0.10.4
-identify==1.4.25
-idna==2.10
-ijson==2.6.1
-imagesize==1.2.0
-importlib-metadata==1.7.0
-importlib-resources==3.0.0
-inflection==0.3.1
-ipaddress==1.0.23
-ipdb==0.13.3
-ipython-genutils==0.2.0
-ipython==5.10.0
-iso8601==0.1.12
-isodate==0.6.0
-itsdangerous==1.1.0
-jira==2.0.0
-jmespath==0.10.0
-json-merge-patch==0.2
-jsondiff==1.1.2
-jsonpatch==1.26
-jsonpickle==1.4.1
-jsonpointer==2.0
-jsonschema==3.2.0
-junit-xml==1.9
-jupyter-client==5.3.5
-jupyter-core==4.6.3
-kombu==4.6.3
-kubernetes==11.0.0
-lazy-object-proxy==1.5.0
-ldap3==2.7
-linecache2==1.0.0
-lockfile==0.12.2
-marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.18.0
-marshmallow==2.19.5
-mccabe==0.6.1
-mistune==0.8.4
-mock==3.0.5
-mongomock==3.19.0
-monotonic==1.5
-more-itertools==5.0.0
-moto==1.3.14
-msrest==0.6.17
-msrestazure==0.6.4
-multi-key-dict==2.0.3
-mysqlclient==1.3.14
-natsort==6.2.1
-nbconvert==5.6.1
-nbformat==4.4.0
-networkx==2.2
-nodeenv==1.4.0
-nteract-scrapbook==0.3.1
-ntlm-auth==1.5.0
-numpy==1.16.6
-oauthlib==3.1.0
-oscrypto==1.2.0
-packaging==20.4
-pandas-gbq==0.13.1
-pandas==0.24.2
-pandocfilters==1.4.2
-papermill==1.2.1
-parameterized==0.7.4
-paramiko==2.7.1
-pathlib2==2.3.5
-pathspec==0.8.0
-pbr==5.4.5
-pendulum==1.4.4
-pexpect==4.8.0
-pickleshare==0.7.5
-pinotdb==0.1.1
-pluggy==0.13.1
-pre-commit==1.21.0
-presto-python-client==0.7.0
-prison==0.1.0
-prometheus-client==0.8.0
-prompt-toolkit==1.0.18
-protobuf==3.12.2
-psutil==5.7.2
-psycopg2-binary==2.8.5
-ptyprocess==0.6.0
-py==1.9.0
-pyOpenSSL==19.1.0
-pyasn1-modules==0.2.8
-pyasn1==0.4.8
-pycodestyle==2.6.0
-pycparser==2.20
-pycryptodomex==3.9.8
-pydata-google-auth==1.1.0
-pydruid==0.5.8
-pyflakes==2.2.0
-pykerberos==1.2.1
-pymongo==3.10.1
-pymssql==2.1.4
-pyparsing==2.4.7
-pyrsistent==0.16.0
-pysftp==0.2.9
-pytest-cov==2.10.0
-pytest-forked==1.2.0
-pytest-instafail==0.4.2
-pytest-rerunfailures==9.0
-pytest-timeout==1.4.2
-pytest-xdist==1.33.0
-pytest==4.6.11
-python-daemon==2.2.4
-python-dateutil==2.8.1
-python-editor==1.0.4
-python-http-client==3.2.7
-python-jenkins==1.7.0
-python-jose==3.1.0
-python-nvd3==0.15.0
-python-openid==2.2.5
-python-slugify==4.0.1
-pytz==2020.1
-pytzdata==2020.1
-pywinrm==0.4.1
-pyzmq==19.0.1
-qds-sdk==1.16.0
-redis==3.5.3
-requests-futures==0.9.4
-requests-kerberos==0.12.0
-requests-mock==1.8.0
-requests-ntlm==1.1.0
-requests-oauthlib==1.3.0
-requests-toolbelt==0.9.1
-requests==2.24.0
-responses==0.10.15
-rsa==4.0
-s3transfer==0.3.3
-sasl==0.2.1
-scandir==1.10.0
-sendgrid==5.6.0
-sentinels==1.0.0
-sentry-sdk==0.16.1
-setproctitle==1.1.10
-simplegeneric==0.8.1
-singledispatch==3.4.0.3
-six==1.15.0
-slackclient==1.3.2
-snakebite==2.11.0
-snowballstemmer==2.0.0
-snowflake-connector-python==2.1.3
-snowflake-sqlalchemy==1.2.3
-soupsieve==1.9.6
-sphinx-argparse==0.2.5
-sphinx-autoapi==1.0.0
-sphinx-jinja==1.1.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-dotnetdomain==0.4
-sphinxcontrib-golangdomain==0.2.0.dev0
-sphinxcontrib-httpdomain==1.7.0
-sphinxcontrib-websupport==1.1.2
-sshpubkeys==3.1.0
-sshtunnel==0.1.5
-tabulate==0.8.7
-tenacity==4.12.0
-testpath==0.4.4
-text-unidecode==1.3
-textwrap3==0.9.2
-thrift-sasl==0.4.2
-thrift==0.13.0
-tokenize-rt==3.2.0
-toml==0.10.1
-tornado==5.1.1
-tqdm==4.48.0
-traceback2==1.4.0
-traitlets==4.3.3
-typing-extensions==3.7.4.2
-typing==3.7.4.3
-tzlocal==1.5.1
-unicodecsv==0.14.1
-unittest2==1.1.0
-uritemplate==3.0.1
-urllib3==1.25.9
-vertica-python==0.10.4
-vine==1.3.0
-virtualenv==20.0.27
-wcwidth==0.2.5
-webencodings==0.5.1
-websocket-client==0.57.0
-wrapt==1.12.1
-xmltodict==0.12.0
-yamllint==1.24.2
-zdesk==2.7.1
-zipp==1.2.0
-zope.deprecation==4.4.0
diff --git a/requirements/requirements-python3.5.txt b/requirements/requirements-python3.5.txt
deleted file mode 100644
index 211b6e2..0000000
--- a/requirements/requirements-python3.5.txt
+++ /dev/null
@@ -1,326 +0,0 @@
-# Editable install with no version control (apache-airflow==1.10.11)
-Babel==2.8.0
-Flask-Admin==1.5.4
-Flask-AppBuilder==1.13.1
-Flask-Babel==0.12.2
-Flask-Bcrypt==0.7.1
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.24.1
-Flask-Login==0.4.1
-Flask-OpenID==1.2.5
-Flask-SQLAlchemy==2.4.4
-Flask-WTF==0.14.3
-Flask==1.1.2
-JPype1==0.7.1
-JayDeBeApi==1.2.3
-Jinja2==2.11.2
-Mako==1.1.3
-Markdown==2.6.11
-MarkupSafe==1.1.1
-PyHive==0.6.2
-PyJWT==1.7.1
-PyNaCl==1.4.0
-PySmbClient==0.1.5
-PyYAML==5.3.1
-Pygments==2.6.1
-SQLAlchemy-JSONField==0.9.0
-SQLAlchemy==1.3.18
-Sphinx==3.1.2
-Unidecode==1.1.1
-WTForms==2.3.1
-Werkzeug==0.16.1
-adal==1.2.4
-alabaster==0.7.12
-alembic==1.4.2
-amqp==2.6.0
-analytics-python==1.2.9
-ansiwrap==0.8.4
-apipkg==1.5
-apispec==3.3.1
-appdirs==1.4.4
-argcomplete==1.12.0
-asn1crypto==1.3.0
-aspy.yaml==1.3.0
-astroid==2.4.2
-atlasclient==1.0.0
-attrs==19.3.0
-aws-sam-translator==1.25.0
-aws-xray-sdk==2.6.0
-azure-common==1.1.25
-azure-cosmos==3.2.0
-azure-datalake-store==0.0.48
-azure-mgmt-containerinstance==1.5.0
-azure-mgmt-resource==10.1.0
-azure-nspkg==3.0.2
-azure-storage-blob==2.1.0
-azure-storage-common==2.1.0
-azure-storage==0.36.0
-backcall==0.2.0
-bcrypt==3.1.7
-beautifulsoup4==4.7.1
-billiard==3.6.3.0
-blinker==1.4
-boto3==1.14.25
-boto==2.49.0
-botocore==1.17.25
-cached-property==1.5.1
-cachetools==4.1.1
-cassandra-driver==3.20.2
-cattrs==1.0.0
-celery==4.4.6
-certifi==2020.6.20
-cffi==1.14.0
-cfgv==2.0.1
-cfn-lint==0.34.0
-cgroupspy==0.1.6
-chardet==3.0.4
-click==6.7
-cloudant==0.5.10
-colorama==0.4.3
-colorlog==4.0.2
-configparser==3.5.3
-coverage==5.2
-croniter==0.3.34
-cryptography==3.0
-cx-Oracle==8.0.0
-datadog==0.38.0
-decorator==4.4.2
-defusedxml==0.6.0
-dill==0.3.2
-distlib==0.3.1
-dnspython==1.16.0
-docker-pycreds==0.4.0
-docker==3.7.3
-docopt==0.6.2
-docutils==0.16
-ecdsa==0.15
-elasticsearch-dsl==5.4.0
-elasticsearch==5.5.3
-email-validator==1.1.1
-entrypoints==0.3
-execnet==1.7.1
-fastavro==0.23.6
-filelock==3.0.12
-flake8-colors==0.1.6
-flake8==3.8.3
-flaky==3.7.0
-flask-swagger==0.2.14
-flower==0.9.5
-freezegun==0.3.15
-fsspec==0.7.4
-funcsigs==1.0.2
-future-fstrings==1.2.0
-future==0.18.2
-gcsfs==0.6.2
-google-api-core==1.22.0
-google-api-python-client==1.10.0
-google-auth-httplib2==0.0.4
-google-auth-oauthlib==0.4.1
-google-auth==1.19.2
-google-cloud-bigquery==1.26.0
-google-cloud-bigtable==1.3.0
-google-cloud-container==1.0.1
-google-cloud-core==1.3.0
-google-cloud-dlp==1.0.0
-google-cloud-language==1.3.0
-google-cloud-secret-manager==1.0.0
-google-cloud-spanner==1.17.1
-google-cloud-speech==1.3.2
-google-cloud-storage==1.29.0
-google-cloud-texttospeech==1.0.1
-google-cloud-translate==2.0.1
-google-cloud-videointelligence==1.15.0
-google-cloud-vision==1.0.0
-google-resumable-media==0.5.1
-googleapis-common-protos==1.52.0
-graphviz==0.14.1
-grpc-google-iam-v1==0.12.3
-grpcio-gcp==0.2.2
-grpcio==1.30.0
-gunicorn==20.0.4
-hdfs==2.5.8
-hmsclient==0.1.1
-httplib2==0.18.1
-humanize==2.5.0
-hvac==0.10.4
-identify==1.4.25
-idna==2.10
-imagesize==1.2.0
-importlib-metadata==1.7.0
-importlib-resources==3.0.0
-inflection==0.5.0
-ipdb==0.13.3
-ipython-genutils==0.2.0
-ipython==7.9.0
-iso8601==0.1.12
-isodate==0.6.0
-itsdangerous==1.1.0
-jedi==0.17.2
-jira==2.0.0
-jmespath==0.10.0
-json-merge-patch==0.2
-jsondiff==1.1.2
-jsonpatch==1.26
-jsonpickle==1.4.1
-jsonpointer==2.0
-jsonschema==3.2.0
-junit-xml==1.9
-jupyter-client==6.1.6
-jupyter-core==4.6.3
-kombu==4.6.11
-kubernetes==11.0.0
-lazy-object-proxy==1.5.0
-ldap3==2.7
-lockfile==0.12.2
-marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.18.0
-marshmallow==2.19.5
-mccabe==0.6.1
-mock==3.0.5
-mongomock==3.19.0
-more-itertools==8.4.0
-moto==1.3.14
-msrest==0.6.17
-msrestazure==0.6.4
-multi-key-dict==2.0.3
-mypy-extensions==0.4.3
-mypy==0.720
-mysqlclient==1.3.14
-natsort==7.0.1
-nbclient==0.1.0
-nbformat==5.0.7
-networkx==2.4
-nodeenv==1.4.0
-nteract-scrapbook==0.4.1
-ntlm-auth==1.5.0
-numpy==1.18.5
-oauthlib==3.1.0
-oscrypto==1.2.0
-packaging==20.4
-pandas-gbq==0.13.2
-pandas==0.25.3
-papermill==2.0.0
-parameterized==0.7.4
-paramiko==2.7.1
-parso==0.7.0
-pathlib2==2.3.5
-pathspec==0.8.0
-pbr==5.4.5
-pendulum==1.4.4
-pexpect==4.8.0
-pickleshare==0.7.5
-pinotdb==0.1.1
-pluggy==0.13.1
-pre-commit==1.21.0
-presto-python-client==0.7.0
-prison==0.1.0
-prometheus-client==0.8.0
-prompt-toolkit==2.0.10
-protobuf==3.12.2
-psutil==5.7.2
-psycopg2-binary==2.8.5
-ptyprocess==0.6.0
-py==1.9.0
-pyOpenSSL==19.1.0
-pyarrow==0.17.1
-pyasn1-modules==0.2.8
-pyasn1==0.4.8
-pycodestyle==2.6.0
-pycparser==2.20
-pycryptodomex==3.9.8
-pydata-google-auth==1.1.0
-pydruid==0.5.8
-pyflakes==2.2.0
-pykerberos==1.2.1
-pymongo==3.10.1
-pymssql==2.1.4
-pyparsing==2.4.7
-pyrsistent==0.16.0
-pysftp==0.2.9
-pytest-cov==2.10.0
-pytest-forked==1.2.0
-pytest-instafail==0.4.2
-pytest-rerunfailures==9.0
-pytest-timeout==1.4.2
-pytest-xdist==1.33.0
-pytest==5.4.3
-python-daemon==2.2.4
-python-dateutil==2.8.1
-python-editor==1.0.4
-python-http-client==3.2.7
-python-jenkins==1.7.0
-python-jose==3.1.0
-python-nvd3==0.15.0
-python-slugify==4.0.1
-python3-openid==3.2.0
-pytz==2020.1
-pytzdata==2020.1
-pywinrm==0.4.1
-pyzmq==19.0.1
-qds-sdk==1.16.0
-redis==3.5.3
-requests-futures==0.9.4
-requests-kerberos==0.12.0
-requests-mock==1.8.0
-requests-ntlm==1.1.0
-requests-oauthlib==1.3.0
-requests-toolbelt==0.9.1
-requests==2.24.0
-responses==0.10.15
-rsa==4.6
-s3transfer==0.3.3
-sasl==0.2.1
-sendgrid==5.6.0
-sentinels==1.0.0
-sentry-sdk==0.16.1
-setproctitle==1.1.10
-six==1.15.0
-slackclient==1.3.2
-snowballstemmer==2.0.0
-snowflake-connector-python==2.2.9
-snowflake-sqlalchemy==1.2.3
-soupsieve==2.0.1
-sphinx-argparse==0.2.5
-sphinx-autoapi==1.0.0
-sphinx-jinja==1.1.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-dotnetdomain==0.4
-sphinxcontrib-golangdomain==0.2.0.dev0
-sphinxcontrib-htmlhelp==1.0.3
-sphinxcontrib-httpdomain==1.7.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.4
-sshpubkeys==3.1.0
-sshtunnel==0.1.5
-tabulate==0.8.7
-tenacity==4.12.0
-text-unidecode==1.3
-textwrap3==0.9.2
-thrift-sasl==0.4.2
-thrift==0.13.0
-tokenize-rt==3.2.0
-toml==0.10.1
-tornado==5.1.1
-tqdm==4.48.0
-traitlets==4.3.3
-typed-ast==1.4.1
-typing-extensions==3.7.4.2
-typing==3.7.4.3
-tzlocal==1.5.1
-unicodecsv==0.14.1
-uritemplate==3.0.1
-urllib3==1.25.9
-vertica-python==0.10.4
-vine==1.3.0
-virtualenv==20.0.27
-wcwidth==0.2.5
-websocket-client==0.57.0
-wrapt==1.12.1
-xmltodict==0.12.0
-yamllint==1.24.2
-zdesk==2.7.1
-zipp==1.2.0
-zope.deprecation==4.4.0
diff --git a/requirements/requirements-python3.6.txt b/requirements/requirements-python3.6.txt
deleted file mode 100644
index 3429b87..0000000
--- a/requirements/requirements-python3.6.txt
+++ /dev/null
@@ -1,333 +0,0 @@
-# Editable install with no version control (apache-airflow==1.10.11)
-Babel==2.8.0
-Flask-Admin==1.5.4
-Flask-AppBuilder==2.3.4
-Flask-Babel==1.0.0
-Flask-Bcrypt==0.7.1
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.24.1
-Flask-Login==0.4.1
-Flask-OpenID==1.2.5
-Flask-SQLAlchemy==2.4.4
-Flask-WTF==0.14.3
-Flask==1.1.2
-JPype1==0.7.1
-JayDeBeApi==1.2.3
-Jinja2==2.11.2
-Mako==1.1.3
-Markdown==2.6.11
-MarkupSafe==1.1.1
-PyHive==0.6.2
-PyJWT==1.7.1
-PyNaCl==1.4.0
-PySmbClient==0.1.5
-PyYAML==5.3.1
-Pygments==2.6.1
-SQLAlchemy-JSONField==0.9.0
-SQLAlchemy-Utils==0.36.8
-SQLAlchemy==1.3.18
-Sphinx==3.1.2
-Unidecode==1.1.1
-WTForms==2.3.1
-Werkzeug==0.16.1
-adal==1.2.4
-alabaster==0.7.12
-alembic==1.4.2
-amqp==2.6.0
-analytics-python==1.2.9
-ansiwrap==0.8.4
-apipkg==1.5
-apispec==1.3.3
-appdirs==1.4.4
-argcomplete==1.12.0
-asn1crypto==1.3.0
-astroid==2.4.2
-async-generator==1.10
-atlasclient==1.0.0
-attrs==19.3.0
-aws-sam-translator==1.25.0
-aws-xray-sdk==2.6.0
-azure-common==1.1.25
-azure-cosmos==3.2.0
-azure-datalake-store==0.0.48
-azure-mgmt-containerinstance==1.5.0
-azure-mgmt-resource==10.1.0
-azure-nspkg==3.0.2
-azure-storage-blob==2.1.0
-azure-storage-common==2.1.0
-azure-storage==0.36.0
-backcall==0.2.0
-bcrypt==3.1.7
-beautifulsoup4==4.7.1
-billiard==3.6.3.0
-black==19.10b0
-blinker==1.4
-boto3==1.14.25
-boto==2.49.0
-botocore==1.17.25
-cached-property==1.5.1
-cachetools==4.1.1
-cassandra-driver==3.20.2
-cattrs==1.0.0
-celery==4.4.6
-certifi==2020.6.20
-cffi==1.14.0
-cfgv==3.1.0
-cfn-lint==0.34.0
-cgroupspy==0.1.6
-chardet==3.0.4
-click==6.7
-cloudant==0.5.10
-colorama==0.4.3
-colorlog==4.0.2
-configparser==3.5.3
-coverage==5.2
-croniter==0.3.34
-cryptography==3.0
-cx-Oracle==8.0.0
-dataclasses==0.7
-datadog==0.38.0
-decorator==4.4.2
-defusedxml==0.6.0
-dill==0.3.2
-distlib==0.3.1
-dnspython==1.16.0
-docker-pycreds==0.4.0
-docker==3.7.3
-docopt==0.6.2
-docutils==0.16
-ecdsa==0.15
-elasticsearch-dsl==5.4.0
-elasticsearch==5.5.3
-email-validator==1.1.1
-entrypoints==0.3
-execnet==1.7.1
-fastavro==0.23.6
-filelock==3.0.12
-flake8-colors==0.1.6
-flake8==3.8.3
-flaky==3.7.0
-flask-swagger==0.2.14
-flower==0.9.5
-freezegun==0.3.15
-fsspec==0.7.4
-funcsigs==1.0.2
-future-fstrings==1.2.0
-future==0.18.2
-gcsfs==0.6.2
-google-api-core==1.22.0
-google-api-python-client==1.10.0
-google-auth-httplib2==0.0.4
-google-auth-oauthlib==0.4.1
-google-auth==1.19.2
-google-cloud-bigquery==1.26.0
-google-cloud-bigtable==1.3.0
-google-cloud-container==2.0.0
-google-cloud-core==1.3.0
-google-cloud-dlp==1.0.0
-google-cloud-language==1.3.0
-google-cloud-secret-manager==1.0.0
-google-cloud-spanner==1.17.1
-google-cloud-speech==1.3.2
-google-cloud-storage==1.29.0
-google-cloud-texttospeech==1.0.1
-google-cloud-translate==2.0.1
-google-cloud-videointelligence==1.15.0
-google-cloud-vision==1.0.0
-google-resumable-media==0.5.1
-googleapis-common-protos==1.52.0
-graphviz==0.14.1
-grpc-google-iam-v1==0.12.3
-grpcio-gcp==0.2.2
-grpcio==1.30.0
-gunicorn==20.0.4
-hdfs==2.5.8
-hmsclient==0.1.1
-httplib2==0.18.1
-humanize==2.5.0
-hvac==0.10.4
-identify==1.4.25
-idna==2.10
-imagesize==1.2.0
-importlib-metadata==1.7.0
-importlib-resources==3.0.0
-inflection==0.5.0
-ipdb==0.13.3
-ipython-genutils==0.2.0
-ipython==7.16.1
-iso8601==0.1.12
-isodate==0.6.0
-itsdangerous==1.1.0
-jedi==0.17.2
-jira==2.0.0
-jmespath==0.10.0
-json-merge-patch==0.2
-jsondiff==1.1.2
-jsonpatch==1.26
-jsonpickle==1.4.1
-jsonpointer==2.0
-jsonschema==3.2.0
-junit-xml==1.9
-jupyter-client==6.1.6
-jupyter-core==4.6.3
-kombu==4.6.11
-kubernetes==11.0.0
-lazy-object-proxy==1.5.0
-ldap3==2.7
-libcst==0.3.7
-lockfile==0.12.2
-marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.23.1
-marshmallow==2.21.0
-mccabe==0.6.1
-mock==4.0.2
-mongomock==3.19.0
-more-itertools==8.4.0
-moto==1.3.14
-msrest==0.6.17
-msrestazure==0.6.4
-multi-key-dict==2.0.3
-mypy-extensions==0.4.3
-mypy==0.720
-mysqlclient==1.3.14
-natsort==7.0.1
-nbclient==0.4.1
-nbformat==5.0.7
-nest-asyncio==1.4.0
-networkx==2.4
-nodeenv==1.4.0
-nteract-scrapbook==0.4.1
-ntlm-auth==1.5.0
-numpy==1.19.1
-oauthlib==3.1.0
-oscrypto==1.2.0
-packaging==20.4
-pandas-gbq==0.13.2
-pandas==1.0.5
-papermill==2.1.2
-parameterized==0.7.4
-paramiko==2.7.1
-parso==0.7.0
-pathspec==0.8.0
-pbr==5.4.5
-pendulum==1.4.4
-pexpect==4.8.0
-pickleshare==0.7.5
-pinotdb==0.1.1
-pluggy==0.13.1
-pre-commit==2.6.0
-presto-python-client==0.7.0
-prison==0.1.3
-prometheus-client==0.8.0
-prompt-toolkit==3.0.5
-proto-plus==1.3.2
-protobuf==3.12.2
-psutil==5.7.2
-psycopg2-binary==2.8.5
-ptyprocess==0.6.0
-py==1.9.0
-pyOpenSSL==19.1.0
-pyarrow==0.17.1
-pyasn1-modules==0.2.8
-pyasn1==0.4.8
-pycodestyle==2.6.0
-pycparser==2.20
-pycryptodomex==3.9.8
-pydata-google-auth==1.1.0
-pydruid==0.5.8
-pyflakes==2.2.0
-pykerberos==1.2.1
-pymongo==3.10.1
-pymssql==2.1.4
-pyparsing==2.4.7
-pyrsistent==0.16.0
-pysftp==0.2.9
-pytest-cov==2.10.0
-pytest-forked==1.2.0
-pytest-instafail==0.4.2
-pytest-rerunfailures==9.0
-pytest-timeout==1.4.2
-pytest-xdist==1.33.0
-pytest==5.4.3
-python-daemon==2.2.4
-python-dateutil==2.8.1
-python-editor==1.0.4
-python-http-client==3.2.7
-python-jenkins==1.7.0
-python-jose==3.1.0
-python-nvd3==0.15.0
-python-slugify==4.0.1
-python3-openid==3.2.0
-pytz==2020.1
-pytzdata==2020.1
-pywinrm==0.4.1
-pyzmq==19.0.1
-qds-sdk==1.16.0
-redis==3.5.3
-regex==2020.7.14
-requests-futures==0.9.4
-requests-kerberos==0.12.0
-requests-mock==1.8.0
-requests-ntlm==1.1.0
-requests-oauthlib==1.3.0
-requests-toolbelt==0.9.1
-requests==2.24.0
-responses==0.10.15
-rsa==4.6
-s3transfer==0.3.3
-sasl==0.2.1
-sendgrid==5.6.0
-sentinels==1.0.0
-sentry-sdk==0.16.1
-setproctitle==1.1.10
-six==1.15.0
-slackclient==1.3.2
-snowballstemmer==2.0.0
-snowflake-connector-python==2.2.9
-snowflake-sqlalchemy==1.2.3
-soupsieve==2.0.1
-sphinx-argparse==0.2.5
-sphinx-autoapi==1.0.0
-sphinx-copybutton==0.2.12
-sphinx-jinja==1.1.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-dotnetdomain==0.4
-sphinxcontrib-golangdomain==0.2.0.dev0
-sphinxcontrib-htmlhelp==1.0.3
-sphinxcontrib-httpdomain==1.7.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.4
-sshpubkeys==3.1.0
-sshtunnel==0.1.5
-tabulate==0.8.7
-tenacity==4.12.0
-text-unidecode==1.3
-textwrap3==0.9.2
-thrift-sasl==0.4.2
-thrift==0.13.0
-toml==0.10.1
-tornado==5.1.1
-tqdm==4.48.0
-traitlets==4.3.3
-typed-ast==1.4.1
-typing-extensions==3.7.4.2
-typing-inspect==0.6.0
-typing==3.7.4.3
-tzlocal==1.5.1
-unicodecsv==0.14.1
-uritemplate==3.0.1
-urllib3==1.25.9
-vertica-python==0.10.4
-vine==1.3.0
-virtualenv==20.0.27
-wcwidth==0.2.5
-websocket-client==0.57.0
-wrapt==1.12.1
-xmltodict==0.12.0
-yamllint==1.24.2
-zdesk==2.7.1
-zipp==3.1.0
-zope.deprecation==4.4.0
diff --git a/requirements/requirements-python3.7.txt b/requirements/requirements-python3.7.txt
deleted file mode 100644
index ff42b59..0000000
--- a/requirements/requirements-python3.7.txt
+++ /dev/null
@@ -1,330 +0,0 @@
-# Editable install with no version control (apache-airflow==1.10.11)
-Babel==2.8.0
-Flask-Admin==1.5.4
-Flask-AppBuilder==2.3.4
-Flask-Babel==1.0.0
-Flask-Bcrypt==0.7.1
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.24.1
-Flask-Login==0.4.1
-Flask-OpenID==1.2.5
-Flask-SQLAlchemy==2.4.4
-Flask-WTF==0.14.3
-Flask==1.1.2
-JPype1==0.7.1
-JayDeBeApi==1.2.3
-Jinja2==2.11.2
-Mako==1.1.3
-Markdown==2.6.11
-MarkupSafe==1.1.1
-PyHive==0.6.2
-PyJWT==1.7.1
-PyNaCl==1.4.0
-PySmbClient==0.1.5
-PyYAML==5.3.1
-Pygments==2.6.1
-SQLAlchemy-JSONField==0.9.0
-SQLAlchemy-Utils==0.36.8
-SQLAlchemy==1.3.18
-Sphinx==3.1.2
-Unidecode==1.1.1
-WTForms==2.3.1
-Werkzeug==0.16.1
-adal==1.2.4
-alabaster==0.7.12
-alembic==1.4.2
-amqp==2.6.0
-analytics-python==1.2.9
-ansiwrap==0.8.4
-apipkg==1.5
-apispec==1.3.3
-appdirs==1.4.4
-argcomplete==1.12.0
-asn1crypto==1.3.0
-astroid==2.4.2
-async-generator==1.10
-atlasclient==1.0.0
-attrs==19.3.0
-aws-sam-translator==1.25.0
-aws-xray-sdk==2.6.0
-azure-common==1.1.25
-azure-cosmos==3.2.0
-azure-datalake-store==0.0.48
-azure-mgmt-containerinstance==1.5.0
-azure-mgmt-resource==10.1.0
-azure-nspkg==3.0.2
-azure-storage-blob==2.1.0
-azure-storage-common==2.1.0
-azure-storage==0.36.0
-backcall==0.2.0
-bcrypt==3.1.7
-beautifulsoup4==4.7.1
-billiard==3.6.3.0
-black==19.10b0
-blinker==1.4
-boto3==1.14.25
-boto==2.49.0
-botocore==1.17.25
-cached-property==1.5.1
-cachetools==4.1.1
-cassandra-driver==3.20.2
-cattrs==1.0.0
-celery==4.4.6
-certifi==2020.6.20
-cffi==1.14.0
-cfgv==3.1.0
-cfn-lint==0.34.0
-cgroupspy==0.1.6
-chardet==3.0.4
-click==6.7
-cloudant==0.5.10
-colorama==0.4.3
-colorlog==4.0.2
-configparser==3.5.3
-coverage==5.2
-croniter==0.3.34
-cryptography==3.0
-cx-Oracle==8.0.0
-datadog==0.38.0
-decorator==4.4.2
-defusedxml==0.6.0
-dill==0.3.2
-distlib==0.3.1
-dnspython==1.16.0
-docker-pycreds==0.4.0
-docker==3.7.3
-docopt==0.6.2
-docutils==0.16
-ecdsa==0.15
-elasticsearch-dsl==5.4.0
-elasticsearch==5.5.3
-email-validator==1.1.1
-entrypoints==0.3
-execnet==1.7.1
-fastavro==0.23.6
-filelock==3.0.12
-flake8-colors==0.1.6
-flake8==3.8.3
-flaky==3.7.0
-flask-swagger==0.2.14
-flower==0.9.5
-freezegun==0.3.15
-fsspec==0.7.4
-funcsigs==1.0.2
-future-fstrings==1.2.0
-future==0.18.2
-gcsfs==0.6.2
-google-api-core==1.22.0
-google-api-python-client==1.10.0
-google-auth-httplib2==0.0.4
-google-auth-oauthlib==0.4.1
-google-auth==1.19.2
-google-cloud-bigquery==1.26.0
-google-cloud-bigtable==1.3.0
-google-cloud-container==2.0.0
-google-cloud-core==1.3.0
-google-cloud-dlp==1.0.0
-google-cloud-language==1.3.0
-google-cloud-secret-manager==1.0.0
-google-cloud-spanner==1.17.1
-google-cloud-speech==1.3.2
-google-cloud-storage==1.29.0
-google-cloud-texttospeech==1.0.1
-google-cloud-translate==2.0.1
-google-cloud-videointelligence==1.15.0
-google-cloud-vision==1.0.0
-google-resumable-media==0.5.1
-googleapis-common-protos==1.52.0
-graphviz==0.14.1
-grpc-google-iam-v1==0.12.3
-grpcio-gcp==0.2.2
-grpcio==1.30.0
-gunicorn==20.0.4
-hdfs==2.5.8
-hmsclient==0.1.1
-httplib2==0.18.1
-humanize==2.5.0
-hvac==0.10.4
-identify==1.4.25
-idna==2.10
-imagesize==1.2.0
-importlib-metadata==1.7.0
-inflection==0.5.0
-ipdb==0.13.3
-ipython-genutils==0.2.0
-ipython==7.16.1
-iso8601==0.1.12
-isodate==0.6.0
-itsdangerous==1.1.0
-jedi==0.17.2
-jira==2.0.0
-jmespath==0.10.0
-json-merge-patch==0.2
-jsondiff==1.1.2
-jsonpatch==1.26
-jsonpickle==1.4.1
-jsonpointer==2.0
-jsonschema==3.2.0
-junit-xml==1.9
-jupyter-client==6.1.6
-jupyter-core==4.6.3
-kombu==4.6.11
-kubernetes==11.0.0
-lazy-object-proxy==1.5.0
-ldap3==2.7
-libcst==0.3.7
-lockfile==0.12.2
-marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.23.1
-marshmallow==2.21.0
-mccabe==0.6.1
-mock==4.0.2
-mongomock==3.19.0
-more-itertools==8.4.0
-moto==1.3.14
-msrest==0.6.17
-msrestazure==0.6.4
-multi-key-dict==2.0.3
-mypy-extensions==0.4.3
-mypy==0.720
-mysqlclient==1.3.14
-natsort==7.0.1
-nbclient==0.4.1
-nbformat==5.0.7
-nest-asyncio==1.4.0
-networkx==2.4
-nodeenv==1.4.0
-nteract-scrapbook==0.4.1
-ntlm-auth==1.5.0
-numpy==1.19.1
-oauthlib==3.1.0
-oscrypto==1.2.0
-packaging==20.4
-pandas-gbq==0.13.2
-pandas==1.0.5
-papermill==2.1.2
-parameterized==0.7.4
-paramiko==2.7.1
-parso==0.7.0
-pathspec==0.8.0
-pbr==5.4.5
-pendulum==1.4.4
-pexpect==4.8.0
-pickleshare==0.7.5
-pinotdb==0.1.1
-pluggy==0.13.1
-pre-commit==2.6.0
-presto-python-client==0.7.0
-prison==0.1.3
-prometheus-client==0.8.0
-prompt-toolkit==3.0.5
-proto-plus==1.3.2
-protobuf==3.12.2
-psutil==5.7.2
-psycopg2-binary==2.8.5
-ptyprocess==0.6.0
-py==1.9.0
-pyOpenSSL==19.1.0
-pyarrow==0.17.1
-pyasn1-modules==0.2.8
-pyasn1==0.4.8
-pycodestyle==2.6.0
-pycparser==2.20
-pycryptodomex==3.9.8
-pydata-google-auth==1.1.0
-pydruid==0.5.8
-pyflakes==2.2.0
-pykerberos==1.2.1
-pymongo==3.10.1
-pymssql==2.1.4
-pyparsing==2.4.7
-pyrsistent==0.16.0
-pysftp==0.2.9
-pytest-cov==2.10.0
-pytest-forked==1.2.0
-pytest-instafail==0.4.2
-pytest-rerunfailures==9.0
-pytest-timeout==1.4.2
-pytest-xdist==1.33.0
-pytest==5.4.3
-python-daemon==2.2.4
-python-dateutil==2.8.1
-python-editor==1.0.4
-python-http-client==3.2.7
-python-jenkins==1.7.0
-python-jose==3.1.0
-python-nvd3==0.15.0
-python-slugify==4.0.1
-python3-openid==3.2.0
-pytz==2020.1
-pytzdata==2020.1
-pywinrm==0.4.1
-pyzmq==19.0.1
-qds-sdk==1.16.0
-redis==3.5.3
-regex==2020.7.14
-requests-futures==0.9.4
-requests-kerberos==0.12.0
-requests-mock==1.8.0
-requests-ntlm==1.1.0
-requests-oauthlib==1.3.0
-requests-toolbelt==0.9.1
-requests==2.24.0
-responses==0.10.15
-rsa==4.6
-s3transfer==0.3.3
-sasl==0.2.1
-sendgrid==5.6.0
-sentinels==1.0.0
-sentry-sdk==0.16.1
-setproctitle==1.1.10
-six==1.15.0
-slackclient==1.3.2
-snowballstemmer==2.0.0
-snowflake-connector-python==2.2.9
-snowflake-sqlalchemy==1.2.3
-soupsieve==2.0.1
-sphinx-argparse==0.2.5
-sphinx-autoapi==1.0.0
-sphinx-copybutton==0.2.12
-sphinx-jinja==1.1.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-dotnetdomain==0.4
-sphinxcontrib-golangdomain==0.2.0.dev0
-sphinxcontrib-htmlhelp==1.0.3
-sphinxcontrib-httpdomain==1.7.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.4
-sshpubkeys==3.1.0
-sshtunnel==0.1.5
-tabulate==0.8.7
-tenacity==4.12.0
-text-unidecode==1.3
-textwrap3==0.9.2
-thrift-sasl==0.4.2
-thrift==0.13.0
-toml==0.10.1
-tornado==5.1.1
-tqdm==4.48.0
-traitlets==4.3.3
-typed-ast==1.4.1
-typing-extensions==3.7.4.2
-typing-inspect==0.6.0
-tzlocal==1.5.1
-unicodecsv==0.14.1
-uritemplate==3.0.1
-urllib3==1.25.9
-vertica-python==0.10.4
-vine==1.3.0
-virtualenv==20.0.27
-wcwidth==0.2.5
-websocket-client==0.57.0
-wrapt==1.12.1
-xmltodict==0.12.0
-yamllint==1.24.2
-zdesk==2.7.1
-zipp==3.1.0
-zope.deprecation==4.4.0
diff --git a/requirements/requirements-python3.8.txt b/requirements/requirements-python3.8.txt
deleted file mode 100644
index e715477..0000000
--- a/requirements/requirements-python3.8.txt
+++ /dev/null
@@ -1,329 +0,0 @@
-# Editable install with no version control (apache-airflow==1.10.11)
-Babel==2.8.0
-Flask-Admin==1.5.4
-Flask-AppBuilder==2.3.4
-Flask-Babel==1.0.0
-Flask-Bcrypt==0.7.1
-Flask-Caching==1.3.3
-Flask-JWT-Extended==3.24.1
-Flask-Login==0.4.1
-Flask-OpenID==1.2.5
-Flask-SQLAlchemy==2.4.4
-Flask-WTF==0.14.3
-Flask==1.1.2
-JPype1==0.7.1
-JayDeBeApi==1.2.3
-Jinja2==2.11.2
-Mako==1.1.3
-Markdown==2.6.11
-MarkupSafe==1.1.1
-PyHive==0.6.2
-PyJWT==1.7.1
-PyNaCl==1.4.0
-PySmbClient==0.1.5
-PyYAML==5.3.1
-Pygments==2.6.1
-SQLAlchemy-JSONField==0.9.0
-SQLAlchemy-Utils==0.36.8
-SQLAlchemy==1.3.18
-Sphinx==3.1.2
-Unidecode==1.1.1
-WTForms==2.3.1
-Werkzeug==0.16.1
-adal==1.2.4
-alabaster==0.7.12
-alembic==1.4.2
-amqp==2.6.0
-analytics-python==1.2.9
-ansiwrap==0.8.4
-apipkg==1.5
-apispec==1.3.3
-appdirs==1.4.4
-argcomplete==1.12.0
-asn1crypto==1.3.0
-astroid==2.4.2
-async-generator==1.10
-atlasclient==1.0.0
-attrs==19.3.0
-aws-sam-translator==1.25.0
-aws-xray-sdk==2.6.0
-azure-common==1.1.25
-azure-cosmos==3.2.0
-azure-datalake-store==0.0.48
-azure-mgmt-containerinstance==1.5.0
-azure-mgmt-resource==10.1.0
-azure-nspkg==3.0.2
-azure-storage-blob==2.1.0
-azure-storage-common==2.1.0
-azure-storage==0.36.0
-backcall==0.2.0
-bcrypt==3.1.7
-beautifulsoup4==4.7.1
-billiard==3.6.3.0
-black==19.10b0
-blinker==1.4
-boto3==1.14.25
-boto==2.49.0
-botocore==1.17.25
-cached-property==1.5.1
-cachetools==4.1.1
-cassandra-driver==3.20.2
-cattrs==1.0.0
-celery==4.4.6
-certifi==2020.6.20
-cffi==1.14.0
-cfgv==3.1.0
-cfn-lint==0.34.0
-cgroupspy==0.1.6
-chardet==3.0.4
-click==6.7
-cloudant==0.5.10
-colorama==0.4.3
-colorlog==4.0.2
-configparser==3.5.3
-coverage==5.2
-croniter==0.3.34
-cryptography==3.0
-cx-Oracle==8.0.0
-datadog==0.38.0
-decorator==4.4.2
-defusedxml==0.6.0
-dill==0.3.2
-distlib==0.3.1
-dnspython==1.16.0
-docker-pycreds==0.4.0
-docker==3.7.3
-docopt==0.6.2
-docutils==0.16
-ecdsa==0.15
-elasticsearch-dsl==5.4.0
-elasticsearch==5.5.3
-email-validator==1.1.1
-entrypoints==0.3
-execnet==1.7.1
-fastavro==0.23.6
-filelock==3.0.12
-flake8-colors==0.1.6
-flake8==3.8.3
-flaky==3.7.0
-flask-swagger==0.2.14
-flower==0.9.5
-freezegun==0.3.15
-fsspec==0.7.4
-funcsigs==1.0.2
-future-fstrings==1.2.0
-future==0.18.2
-gcsfs==0.6.2
-google-api-core==1.22.0
-google-api-python-client==1.10.0
-google-auth-httplib2==0.0.4
-google-auth-oauthlib==0.4.1
-google-auth==1.19.2
-google-cloud-bigquery==1.26.0
-google-cloud-bigtable==1.3.0
-google-cloud-container==2.0.0
-google-cloud-core==1.3.0
-google-cloud-dlp==1.0.0
-google-cloud-language==1.3.0
-google-cloud-secret-manager==1.0.0
-google-cloud-spanner==1.17.1
-google-cloud-speech==1.3.2
-google-cloud-storage==1.29.0
-google-cloud-texttospeech==1.0.1
-google-cloud-translate==2.0.1
-google-cloud-videointelligence==1.15.0
-google-cloud-vision==1.0.0
-google-resumable-media==0.5.1
-googleapis-common-protos==1.52.0
-graphviz==0.14.1
-grpc-google-iam-v1==0.12.3
-grpcio-gcp==0.2.2
-grpcio==1.30.0
-gunicorn==20.0.4
-hdfs==2.5.8
-hmsclient==0.1.1
-httplib2==0.18.1
-humanize==2.5.0
-hvac==0.10.4
-identify==1.4.25
-idna==2.10
-imagesize==1.2.0
-importlib-metadata==1.7.0
-inflection==0.5.0
-ipdb==0.13.3
-ipython-genutils==0.2.0
-ipython==7.16.1
-iso8601==0.1.12
-isodate==0.6.0
-itsdangerous==1.1.0
-jedi==0.17.2
-jira==2.0.0
-jmespath==0.10.0
-json-merge-patch==0.2
-jsondiff==1.1.2
-jsonpatch==1.26
-jsonpickle==1.4.1
-jsonpointer==2.0
-jsonschema==3.2.0
-junit-xml==1.9
-jupyter-client==6.1.6
-jupyter-core==4.6.3
-kombu==4.6.11
-kubernetes==11.0.0
-lazy-object-proxy==1.5.0
-ldap3==2.7
-libcst==0.3.7
-lockfile==0.12.2
-marshmallow-enum==1.5.1
-marshmallow-sqlalchemy==0.23.1
-marshmallow==2.21.0
-mccabe==0.6.1
-mock==4.0.2
-mongomock==3.19.0
-more-itertools==8.4.0
-moto==1.3.14
-msrest==0.6.17
-msrestazure==0.6.4
-multi-key-dict==2.0.3
-mypy-extensions==0.4.3
-mypy==0.720
-mysqlclient==1.3.14
-natsort==7.0.1
-nbclient==0.4.1
-nbformat==5.0.7
-nest-asyncio==1.4.0
-networkx==2.4
-nodeenv==1.4.0
-nteract-scrapbook==0.4.1
-ntlm-auth==1.5.0
-numpy==1.19.1
-oauthlib==3.1.0
-oscrypto==1.2.0
-packaging==20.4
-pandas-gbq==0.13.2
-pandas==1.0.5
-papermill==2.1.2
-parameterized==0.7.4
-paramiko==2.7.1
-parso==0.7.0
-pathspec==0.8.0
-pbr==5.4.5
-pendulum==1.4.4
-pexpect==4.8.0
-pickleshare==0.7.5
-pinotdb==0.1.1
-pluggy==0.13.1
-pre-commit==2.6.0
-presto-python-client==0.7.0
-prison==0.1.3
-prometheus-client==0.8.0
-prompt-toolkit==3.0.5
-proto-plus==1.3.2
-protobuf==3.12.2
-psutil==5.7.2
-psycopg2-binary==2.8.5
-ptyprocess==0.6.0
-py==1.9.0
-pyOpenSSL==19.1.0
-pyarrow==0.17.1
-pyasn1-modules==0.2.8
-pyasn1==0.4.8
-pycodestyle==2.6.0
-pycparser==2.20
-pycryptodomex==3.9.8
-pydata-google-auth==1.1.0
-pydruid==0.5.8
-pyflakes==2.2.0
-pykerberos==1.2.1
-pymongo==3.10.1
-pyparsing==2.4.7
-pyrsistent==0.16.0
-pysftp==0.2.9
-pytest-cov==2.10.0
-pytest-forked==1.2.0
-pytest-instafail==0.4.2
-pytest-rerunfailures==9.0
-pytest-timeout==1.4.2
-pytest-xdist==1.33.0
-pytest==5.4.3
-python-daemon==2.2.4
-python-dateutil==2.8.1
-python-editor==1.0.4
-python-http-client==3.2.7
-python-jenkins==1.7.0
-python-jose==3.1.0
-python-nvd3==0.15.0
-python-slugify==4.0.1
-python3-openid==3.2.0
-pytz==2020.1
-pytzdata==2020.1
-pywinrm==0.4.1
-pyzmq==19.0.1
-qds-sdk==1.16.0
-redis==3.5.3
-regex==2020.7.14
-requests-futures==0.9.4
-requests-kerberos==0.12.0
-requests-mock==1.8.0
-requests-ntlm==1.1.0
-requests-oauthlib==1.3.0
-requests-toolbelt==0.9.1
-requests==2.24.0
-responses==0.10.15
-rsa==4.6
-s3transfer==0.3.3
-sasl==0.2.1
-sendgrid==5.6.0
-sentinels==1.0.0
-sentry-sdk==0.16.1
-setproctitle==1.1.10
-six==1.15.0
-slackclient==1.3.2
-snowballstemmer==2.0.0
-snowflake-connector-python==2.2.9
-snowflake-sqlalchemy==1.2.3
-soupsieve==2.0.1
-sphinx-argparse==0.2.5
-sphinx-autoapi==1.0.0
-sphinx-copybutton==0.2.12
-sphinx-jinja==1.1.1
-sphinx-rtd-theme==0.5.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-dotnetdomain==0.4
-sphinxcontrib-golangdomain==0.2.0.dev0
-sphinxcontrib-htmlhelp==1.0.3
-sphinxcontrib-httpdomain==1.7.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.4
-sshpubkeys==3.1.0
-sshtunnel==0.1.5
-tabulate==0.8.7
-tenacity==4.12.0
-text-unidecode==1.3
-textwrap3==0.9.2
-thrift-sasl==0.4.2
-thrift==0.13.0
-toml==0.10.1
-tornado==5.1.1
-tqdm==4.48.0
-traitlets==4.3.3
-typed-ast==1.4.1
-typing-extensions==3.7.4.2
-typing-inspect==0.6.0
-tzlocal==1.5.1
-unicodecsv==0.14.1
-uritemplate==3.0.1
-urllib3==1.25.9
-vertica-python==0.10.4
-vine==1.3.0
-virtualenv==20.0.27
-wcwidth==0.2.5
-websocket-client==0.57.0
-wrapt==1.12.1
-xmltodict==0.12.0
-yamllint==1.24.2
-zdesk==2.7.1
-zipp==3.1.0
-zope.deprecation==4.4.0
diff --git a/requirements/setup-2.7.md5 b/requirements/setup-2.7.md5
deleted file mode 100644
index d24fa17..0000000
--- a/requirements/setup-2.7.md5
+++ /dev/null
@@ -1 +0,0 @@
-52a5d9b968ee82e35b5b49ed02361377  /opt/airflow/setup.py
diff --git a/requirements/setup-3.5.md5 b/requirements/setup-3.5.md5
deleted file mode 100644
index d24fa17..0000000
--- a/requirements/setup-3.5.md5
+++ /dev/null
@@ -1 +0,0 @@
-52a5d9b968ee82e35b5b49ed02361377  /opt/airflow/setup.py
diff --git a/requirements/setup-3.6.md5 b/requirements/setup-3.6.md5
deleted file mode 100644
index d24fa17..0000000
--- a/requirements/setup-3.6.md5
+++ /dev/null
@@ -1 +0,0 @@
-52a5d9b968ee82e35b5b49ed02361377  /opt/airflow/setup.py
diff --git a/requirements/setup-3.7.md5 b/requirements/setup-3.7.md5
deleted file mode 100644
index d24fa17..0000000
--- a/requirements/setup-3.7.md5
+++ /dev/null
@@ -1 +0,0 @@
-52a5d9b968ee82e35b5b49ed02361377  /opt/airflow/setup.py
diff --git a/requirements/setup-3.8.md5 b/requirements/setup-3.8.md5
deleted file mode 100644
index d24fa17..0000000
--- a/requirements/setup-3.8.md5
+++ /dev/null
@@ -1 +0,0 @@
-52a5d9b968ee82e35b5b49ed02361377  /opt/airflow/setup.py


[airflow] 40/44: fixup! Include airflow/contrib/executors in the dist package

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 170078914b0efcbd8405f442a978fc08bed2c685
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 23 20:58:45 2020 +0100

    fixup! Include airflow/contrib/executors in the dist package
---
 docs/conf.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/conf.py b/docs/conf.py
index 1728274..b72ee3c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -193,6 +193,8 @@ exclude_patterns = [
     '_api/airflow/contrib/auth',
     '_api/airflow/contrib/example_dags',
     '_api/airflow/contrib/executors/index.rst',
+    '_api/airflow/contrib/executors/kubernetes_executor/index.rst',
+    '_api/airflow/contrib/executors/mesos_executor/index.rst',
     '_api/airflow/contrib/index.rst',
     '_api/airflow/contrib/kubernetes',
     '_api/airflow/contrib/task_runner',


[airflow] 32/44: Click should be limited for Python 2.7

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0841c3b6fa397669c71adacdebc83297cf290e4d
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 23 07:54:09 2020 +0100

    Click should be limited for Python 2.7
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index d0bd0ba..84f4b4e 100644
--- a/setup.py
+++ b/setup.py
@@ -345,7 +345,6 @@ papermill = [
     'pyarrow<1.0.0',
     'fsspec<0.8.0;python_version=="3.5"',
     'black==20.8b0;python_version>="3.6"'  # we need to limit black version as we have click < 7
-
 ]
 password = [
     'bcrypt>=2.0.0',
@@ -602,6 +601,7 @@ INSTALL_REQUIREMENTS = [
     # cattrs >= 1.1.0 dropped support for Python 3.6
     'cattrs>=1.0, <1.1.0;python_version<="3.6"',
     'cattrs>=1.0, <2.0;python_version>"3.6"',
+    'click<8.0.0;python_version<"3.0"',  # click >8 is python 3.6 only but not marked as such yet
     'colorlog==4.0.2',
     'configparser>=3.5.0, <3.6.0',
     'croniter>=0.3.17, <0.4',
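
Markers such as ``python_version<"3.0"`` are evaluated by pip at install time, so the pin only
takes effect on Python 2.7. The same behaviour can be seen with a direct install (a sketch):

.. code-block:: bash

  # on Python 3, pip reports that the marker does not match the environment and skips the pin
  pip install 'click<8.0.0; python_version < "3.0"'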


[airflow] 11/44: Kubernetes worker pod doesn't use docker container entrypoint (#12766)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4c459106144dfae6b2d66473d0ddf5050d0b2985
Author: Daniel Imberman <da...@gmail.com>
AuthorDate: Mon Dec 7 13:54:45 2020 -0800

    Kubernetes worker pod doesn't use docker container entrypoint (#12766)
    
    * Kubernetes worker pod doesn't use docker container entrypoint
    
    Fixes issue on openshift caused by KubernetesExecutor pods not running
    via the entrypoint script
    
    * fix
    
    * Update UPGRADING_TO_2.0.md
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    
    * fix UPGRADING
    
    * @ashb comments
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    (cherry picked from commit 190066cf201e5b0442bbbd6df74efecae523ee76)
---
 airflow/executors/kubernetes_executor.py            | 2 +-
 chart/templates/scheduler/scheduler-deployment.yaml | 2 +-
 chart/templates/webserver/webserver-deployment.yaml | 2 +-
 chart/tests/test_basic_helm_chart.py                | 2 --
 scripts/in_container/prod/entrypoint_prod.sh        | 7 +++----
 5 files changed, 6 insertions(+), 9 deletions(-)

diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py
index 73dd91e..663a9da 100644
--- a/airflow/executors/kubernetes_executor.py
+++ b/airflow/executors/kubernetes_executor.py
@@ -442,7 +442,7 @@ class AirflowKubernetesScheduler(LoggingMixin):
             try_number=try_number,
             kube_image=self.kube_config.kube_image,
             date=execution_date,
-            command=command,
+            args=command,
             pod_override_object=kube_executor_config,
             base_worker_pod=self.worker_configuration_pod
         )
diff --git a/chart/templates/scheduler/scheduler-deployment.yaml b/chart/templates/scheduler/scheduler-deployment.yaml
index 61dcade..aecc008 100644
--- a/chart/templates/scheduler/scheduler-deployment.yaml
+++ b/chart/templates/scheduler/scheduler-deployment.yaml
@@ -107,7 +107,7 @@ spec:
         - name: scheduler
           image: {{ template "airflow_image" . }}
           imagePullPolicy: {{ .Values.images.airflow.pullPolicy }}
-          args: ["scheduler"]
+          args: ["airflow", "scheduler"]
           envFrom:
           {{- include "custom_airflow_environment_from" . | default "\n  []" | indent 10 }}
           env:
diff --git a/chart/templates/webserver/webserver-deployment.yaml b/chart/templates/webserver/webserver-deployment.yaml
index 25b6b63..ffa911c 100644
--- a/chart/templates/webserver/webserver-deployment.yaml
+++ b/chart/templates/webserver/webserver-deployment.yaml
@@ -94,7 +94,7 @@ spec:
         - name: webserver
           image: {{ template "airflow_image" . }}
           imagePullPolicy: {{ .Values.images.airflow.pullPolicy }}
-          args: ["webserver"]
+          args: ["airflow", "webserver"]
           resources:
 {{ toYaml .Values.webserver.resources | indent 12 }}
           volumeMounts:
diff --git a/chart/tests/test_basic_helm_chart.py b/chart/tests/test_basic_helm_chart.py
index a9cfa16..26ea1c1 100644
--- a/chart/tests/test_basic_helm_chart.py
+++ b/chart/tests/test_basic_helm_chart.py
@@ -113,5 +113,3 @@ class TestBaseChartTest(unittest.TestCase):
             if image.startswith(image_repo):
                 # Make sure that a command is not specified
                 self.assertNotIn("command", obj)
-                # Make sure that the first arg is never airflow
-                self.assertNotEqual(obj["args"][0], "airflow")  # pylint: disable=invalid-sequence-index
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index 0276e69..136074a 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -111,6 +111,7 @@ else
     verify_db_connection "${AIRFLOW__CORE__SQL_ALCHEMY_CONN}"
 fi
 
+
 # The Bash and python commands still should verify the basic connections so they are run after the
 # DB check but before the broker check
 if [[ ${AIRFLOW_COMMAND} == "bash" ]]; then
@@ -125,7 +126,7 @@ elif [[ ${AIRFLOW_COMMAND} == "airflow" ]]; then
 fi
 
 # Note: the broker backend configuration concerns only a subset of Airflow components
-if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|worker|flower)$ ]]; then
+if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]]; then
     if [[ -n "${AIRFLOW__CELERY__BROKER_URL_CMD=}" ]]; then
         verify_db_connection "$(eval "$AIRFLOW__CELERY__BROKER_URL_CMD")"
     else
@@ -136,6 +137,4 @@ if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|worker|flower)$ ]]; then
     fi
 fi
 
-
-# Run the command
-exec airflow "${@}"
+exec "airflow" "${@}"


[airflow] 15/44: Update CI to run tests against v2-0-test branch (#10891)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ad2d9d4b26e9a048393fb2f5e39b73314196dbb9
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Wed Dec 9 19:35:04 2020 +0000

    Update CI to run tests against v2-0-test branch (#10891)
    
    (cherry picked from commit db166ba75c447a08b94e7be1ab09042fd6361581)
---
 codecov.yml | 72 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 72 insertions(+)

diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000..3559727
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,72 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+---
+codecov:
+  require_ci_to_pass: true
+  notify:
+    wait_for_ci: false
+
+coverage:
+  precision: 2
+  round: down
+  range: "85...100"
+  status:
+    project:
+      default:
+        # basic
+        target: auto
+        threshold: 0%
+        base: auto
+        paths:
+          - "airflow"
+        # advanced
+        branches:
+          - master
+          - v1-10-stable
+          - v1-10-test
+          - v2-0-test
+        if_not_found: success
+        if_ci_failed: error
+        informational: true
+        only_pulls: false
+    patch:
+      default:
+        # basic
+        target: auto
+        threshold: 0%
+        base: auto
+        # advanced
+        branches:
+          - master
+          - v1-10-stable
+          - v1-10-test
+          - v2-0-test
+        if_no_uploads: error
+        if_not_found: success
+        if_ci_failed: error
+        only_pulls: false
+        paths:
+          - "airflow"
+parsers:
+  gcov:
+    branch_detection:
+      conditional: true
+      loop: true
+      method: false
+      macro: false
+
+comment: false
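
The configuration above can be checked before committing by posting it to
Codecov's documented validation endpoint, for example (assumes network
access and that the file is in the current directory):

    # Validate codecov.yml syntax against Codecov's validator.
    curl --silent --data-binary @codecov.yml https://codecov.io/validate
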


[airflow] 03/44: Adds --no-rbac-ui flag for Breeze airflow 1.10 installation (#11315)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 201e963219ca6dc5d12fbd6a9878f9685748d00d
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Wed Oct 7 02:00:00 2020 +0200

    Adds --no-rbac-ui flag for Breeze airflow 1.10 installation (#11315)
    
    When installing Airflow 1.10 via Breeze we now enable RBAC
    by default, but it can be disabled with the --no-rbac-ui flag.
    
    This is useful to test different variants of 1.10 when verifying
    release candidates in connection with the 'start-airflow'
    command.
    
    (cherry picked from commit 22c6a843d760d920f329fc97aa55f45d82682ab9)
    (cherry picked from commit ba6bb8bf3b20d5e2684b0e1a775e2cc9fba33768)
    (cherry picked from commit e9940ab34e715593a2fcaf912fb6ba43605388fb)
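
    A sketch of the intended usage when checking a 1.10 release candidate
    (the version shown is illustrative, not tied to this commit):

        # Start Airflow 1.10 in Breeze with the legacy (non-RBAC) UI;
        # "1.10.12rc1" is only an example version string.
        ./breeze start-airflow --install-airflow-version 1.10.12rc1 --no-rbac-ui
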
---
 scripts/ci/libraries/_docker.env      | 1 +
 scripts/in_container/entrypoint_ci.sh | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/scripts/ci/libraries/_docker.env b/scripts/ci/libraries/_docker.env
index 3290f20..6bc53dc 100644
--- a/scripts/ci/libraries/_docker.env
+++ b/scripts/ci/libraries/_docker.env
@@ -35,3 +35,4 @@ DEFAULT_CONSTRAINTS_BRANCH
 GITHUB_REGISTRY_PULL_IMAGE_TAG
 POSTGRES_VERSION
 MYSQL_VERSION
+DISABLE_RBAC
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 057d630..a645922 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -117,6 +117,8 @@ unset AIRFLOW__CORE__UNIT_TEST_MODE
 mkdir -pv "${AIRFLOW_HOME}/logs/"
 cp -f "${IN_CONTAINER_DIR}/airflow_ci.cfg" "${AIRFLOW_HOME}/unittests.cfg"
 
+disable_rbac_if_requested
+
 set +e
 "${IN_CONTAINER_DIR}/check_environment.sh"
 ENVIRONMENT_EXIT_CODE=$?


[airflow] 27/44: Production images on CI are now built from packages (#12685)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit fa6c547a0ad42c47f4826a3e88b3d1576d41c0b1
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Dec 6 23:36:33 2020 +0100

    Production images on CI are now built from packages (#12685)
    
    So far, the production images of Airflow were built from sources
    on CI. This PR changes that: the airflow + providers packages are
    built first and installed, rather than using sources as the
    installation mechanism.
    
    Part of #12261
    
    (cherry picked from commit ed1825c0264d1f77c4754b722fb3721cbcd779d7)
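
    Roughly, the flow this enables can be reproduced locally as follows
    (the flag names come from the breeze and breeze-complete changes below;
    treat the sequence as an illustration, not the canonical CI script):

        # 1. Build airflow packages into dist/
        ./breeze prepare-airflow-packages --package-format wheel
        # 2. Put them into the Docker build context
        cp dist/*.whl docker-context-files/
        # 3. Build the production image from the local packages, not PyPI
        ./breeze build-image --production-image --install-from-docker-context-files
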
---
 .dockerignore                                      |  3 +-
 .github/workflows/build-images-workflow-run.yml    |  6 +--
 .github/workflows/scheduled_quarantined.yml        |  1 -
 BREEZE.rst                                         |  8 ++++
 CI.rst                                             | 19 ++-------
 CONTRIBUTING.rst                                   |  2 +-
 Dockerfile.ci                                      | 16 ++++----
 LOCAL_VIRTUALENV.rst                               | 25 ++++++++++++
 breeze                                             |  2 +
 breeze-complete                                    |  4 +-
 docker-context-files/README.md                     |  2 +-
 .../ci_build_airflow_package.sh}                   | 18 ++++++---
 scripts/ci/docker-compose/files.yml                |  1 +
 scripts/ci/docker-compose/local.yml                |  1 -
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  2 +
 scripts/ci/images/ci_verify_prod_image.sh          |  2 +-
 scripts/ci/libraries/_all_libs.sh                  |  2 +
 .../libraries/_build_airflow_packages.sh}          | 32 ++++++++++-----
 scripts/ci/libraries/_initialization.sh            | 22 ++++-------
 scripts/ci/libraries/_local_mounts.sh              |  1 -
 scripts/ci/libraries/_runs.sh                      | 17 ++++++--
 scripts/ci/testing/ci_run_airflow_testing.sh       |  5 +--
 scripts/in_container/_in_container_utils.sh        | 16 --------
 scripts/in_container/check_environment.sh          | 46 ++++++++++++----------
 scripts/in_container/configure_environment.sh      |  1 -
 scripts/in_container/entrypoint_exec.sh            |  1 -
 tests/bats/breeze/test_breeze_complete.bats        |  8 ++++
 27 files changed, 152 insertions(+), 111 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index ac7372b..8a90d74 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -59,8 +59,9 @@
 !.github
 !empty
 
-# This folder is for you if you want to add any files to the docker context when you build your own
+# This folder is for you if you want to add any packages to the docker context when you build your own
 # docker image. most of other files and any new folder you add will be excluded by default
+# if you need other types of files - please add the extensions here.
 !docker-context-files
 
 # Avoid triggering context change on README change (new companies using Airflow)
diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index c5480c6..93b2176 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -23,7 +23,6 @@ on:  # yamllint disable-line rule:truthy
     types: ['requested']
 env:
   MOUNT_LOCAL_SOURCES: "false"
-  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
@@ -328,7 +327,7 @@ jobs:
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
-          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
+          python-version: ${{  matrix.python-version }}
         if: steps.defaults.outputs.proceed == 'true'
       - name: >
           Override "scripts/ci" with the "${{ needs.cancel-workflow-runs.outputs.targetBranch }}" branch
@@ -345,7 +344,8 @@ jobs:
         if: steps.defaults.outputs.proceed == 'true'
       - name: "Build CI images ${{ matrix.python-version }}:${{ github.event.workflow_run.id }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-        if: matrix.image-type == 'CI' && steps.defaults.outputs.proceed == 'true'
+        # locally built CI image is needed to prepare packages for PROD image build
+        if: steps.defaults.outputs.proceed == 'true'
       - name: "Push CI images ${{ matrix.python-version }}:${{ github.event.workflow_run.id }}"
         run: ./scripts/ci/images/ci_push_ci_images.sh
         if: matrix.image-type == 'CI' && steps.defaults.outputs.proceed == 'true'
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index 14dc91d..9877e4c 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -24,7 +24,6 @@ on:  # yamllint disable-line rule:truthy
 
 env:
   MOUNT_LOCAL_SOURCES: "false"
-  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
diff --git a/BREEZE.rst b/BREEZE.rst
index 43018a0..4c1b3a7 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1653,6 +1653,14 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           Default: 
 
+  -S, --version-suffix-for-pypi SUFFIX
+          Adds optional suffix to the version in the generated backport package. It can be used
+          to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.
+
+  -N, --version-suffix-for-svn SUFFIX
+          Adds optional suffix to the generated names of package. It can be used to generate
+          rc1/rc2 ... versions of the packages to be uploaded to SVN.
+
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
           debugging - when you run breeze with --verbose flags you will be able to see the commands
diff --git a/CI.rst b/CI.rst
index e087438..0ac1c9d 100644
--- a/CI.rst
+++ b/CI.rst
@@ -135,13 +135,6 @@ You can use those variables when you try to reproduce the build locally.
 |                                         |             |             |            | directories) generated locally on the           |
 |                                         |             |             |            | host during development.                        |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
-| ``MOUNT_FILES``                         |     true    |     true    |    true    | Determines whether "files" folder from          |
-|                                         |             |             |            | sources is mounted as "/files" folder           |
-|                                         |             |             |            | inside the container. This is used to           |
-|                                         |             |             |            | share results of local actions to the           |
-|                                         |             |             |            | host, as well as to pass host files to          |
-|                                         |             |             |            | inside container for local development.         |
-+-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 |                                                           Force variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``FORCE_PULL_IMAGES``                   |    true     |    true     |    true    | Determines if images are force-pulled,          |
@@ -203,7 +196,9 @@ You can use those variables when you try to reproduce the build locally.
 |                                                           Image variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``INSTALL_AIRFLOW_VERSION``             |             |             |            | Installs Airflow version from PyPI when         |
-|                                         |             |             |            | building image.                                 |
+|                                         |             |             |            | building image. Can be "none" to skip airflow   |
+|                                         |             |             |            | installation so that it can be installed from   |
+|                                         |             |             |            | locally prepared packages.                      |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``INSTALL_AIRFLOW_REFERENCE``           |             |             |            | Installs Airflow version from GitHub            |
 |                                         |             |             |            | branch or tag.                                  |
@@ -362,12 +357,6 @@ Note that you need to set "CI" variable to true in order to get the same results
 |                              |                      | [``pull_request``, ``pull_request_target``,         |
 |                              |                      |  ``schedule``, ``push``]                            |
 +------------------------------+----------------------+-----------------------------------------------------+
-| CI_SOURCE_REPO               | ``apache/airflow``   | Source repository. This might be different than the |
-|                              |                      | ``CI_TARGET_REPO`` for pull requests                |
-+------------------------------+----------------------+-----------------------------------------------------+
-| CI_SOURCE_BRANCH             | ``master``           | Branch in the source repository that is used to     |
-|                              |                      | make the pull request.                              |
-+------------------------------+----------------------+-----------------------------------------------------+
 | CI_REF                       | ``refs/head/master`` | Branch in the source repository that is used to     |
 |                              |                      | make the pull request.                              |
 +------------------------------+----------------------+-----------------------------------------------------+
@@ -700,7 +689,7 @@ We also have a script that can help to clean-up the old artifacts:
 CodeQL scan
 -----------
 
-The CodeQL security scan uses GitHub security scan framework to scan our code for security violations.
+The `CodeQL <https://securitylab.github.com/tools/codeql>`_ security scan uses GitHub security scan framework to scan our code for security violations.
 It is run for JavaScript and python code.
 
 Naming conventions for stored images
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index bc5661c..bc64117 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -579,7 +579,7 @@ and not installed together with the core, unless you set ``INSTALL_PROVIDERS_FRO
 variable to ``true``.
 
 In Breeze - which is a development environment, ``INSTALL_PROVIDERS_FROM_SOURCES`` variable is set to true,
-but you can add ``--skip-installing-airflow-providers`` flag to Breeze to skip installing providers when
+but you can add ``--skip-installing-airflow-providers-from-sources`` flag to Breeze to skip installing providers when
 building the images.
 
 One watch-out - providers are still always installed (or rather available) if you install airflow from
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 2210989..67e5bb1 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -261,11 +261,11 @@ ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH}
 ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
 
-ARG AIRFLOW_LOCAL_PIP_WHEELS=""
-ENV AIRFLOW_LOCAL_PIP_WHEELS=${AIRFLOW_LOCAL_PIP_WHEELS}
+ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
+ENV INSTALL_FROM_DOCKER_CONTEXT_FILES=${INSTALL_FROM_DOCKER_CONTEXT_FILES}
 
-ARG INSTALL_AIRFLOW_VIA_PIP="true"
-ENV INSTALL_AIRFLOW_VIA_PIP=${INSTALL_AIRFLOW_VIA_PIP}
+ARG INSTALL_FROM_PYPI="true"
+ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI}
 
 RUN pip install --upgrade "pip==${PIP_VERSION}"
 
@@ -317,7 +317,7 @@ ENV UPGRADE_TO_LATEST_CONSTRAINTS=${UPGRADE_TO_LATEST_CONSTRAINTS}
 # Usually we will install versions based on the dependencies in setup.py and upgraded only if needed.
 # But in cron job we will install latest versions matching setup.py to see if there is no breaking change
 # and push the constraints if everything is successful
-RUN if [[ ${INSTALL_AIRFLOW_VIA_PIP} == "true" ]]; then \
+RUN if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
         if [[ "${UPGRADE_TO_LATEST_CONSTRAINTS}" != "false" ]]; then \
             pip install -e ".[${AIRFLOW_EXTRAS}]" --upgrade --upgrade-strategy eager; \
         else \
@@ -329,9 +329,9 @@ RUN if [[ ${INSTALL_AIRFLOW_VIA_PIP} == "true" ]]; then \
 # they are also installed additionally to whatever is installed from Airflow.
 COPY docker-context-files/ /docker-context-files/
 
-RUN if [[ ${AIRFLOW_LOCAL_PIP_WHEELS} != "true" ]]; then \
-        if ls /docker-context-files/*.whl 1> /dev/null 2>&1; then \
-            pip install --no-deps /docker-context-files/*.whl; \
+RUN if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} != "true" ]]; then \
+        if ls /docker-context-files/*.{whl,tar.gz} 1> /dev/null 2>&1; then \
+            pip install --no-deps /docker-context-files/*.{whl,tar.gz}; \
         fi ; \
     fi
 
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index 574366d..03b60c8 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -171,6 +171,31 @@ Activate your virtualenv, e.g. by using ``workon``, and once you are in it, run:
     cd airflow/www
     yarn build
 
+Developing Providers
+--------------------
+
+In Airflow 2.0 we introduced split of Apache Airflow into separate packages - there is one main
+apache-airflow package with core of Airflow and 70+ packages for all providers (external services
+and software Airflow can communicate with).
+
+Developing providers is part of Airflow development, but when you install airflow as editable in your local
+development environment, the corresponding provider packages will be also installed from PyPI. However, the
+providers will also be present in your "airflow/providers" folder. This might lead to confusion,
+which sources of providers are imported during development. It will depend on your
+environment's PYTHONPATH setting in general.
+
+In order to avoid the confusion, you can set ``INSTALL_PROVIDERS_FROM_SOURCES`` environment to ``true``
+before running ``pip install`` command:
+
+.. code-block:: bash
+
+  INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -U -e ".[devel,<OTHER EXTRAS>]" \
+     --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
+
+This way no providers packages will be installed and they will always be imported from the "airflow/providers"
+folder.
+
+
 Running Tests
 -------------
 
diff --git a/breeze b/breeze
index f6a45a5..7a26b30 100755
--- a/breeze
+++ b/breeze
@@ -1676,6 +1676,7 @@ ${CMDNAME} prepare-airflow-packages [FLAGS]
 
 Flags:
 $(breeze::flag_packages)
+$(breeze::flag_version_suffix)
 $(breeze::flag_verbosity)
 "
     readonly DETAILED_USAGE_PREPARE_AIRFLOW_PACKAGES
@@ -2140,6 +2141,7 @@ function breeze::flag_local_file_mounting() {
 -l, --skip-mounting-local-sources
         Skips mounting local volume with sources - you get exactly what is in the
         docker image rather than your current local sources of Airflow.
+
 "
 }
 
diff --git a/breeze-complete b/breeze-complete
index 7e1ccc6..042a7fa 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -140,12 +140,12 @@ build-cache-local build-cache-pulled build-cache-disabled disable-pip-cache
 dockerhub-user: dockerhub-repo: github-registry github-repository: github-image-id:
 postgres-version: mysql-version:
 additional-extras: additional-python-deps: additional-dev-deps: additional-runtime-deps: image-tag:
-disable-mysql-client-installation constraints-location: disable-pip-cache add-local-pip-wheels
+disable-mysql-client-installation constraints-location: disable-pip-cache install-from-docker-context-files
 additional-extras: additional-python-deps: skip-installing-airflow-via-pip
 dev-apt-deps: additional-dev-apt-deps: dev-apt-command: additional-dev-apt-command: additional-dev-apt-env:
 runtime-apt-deps: additional-runtime-apt-deps: runtime-apt-command: additional-runtime-apt-command: additional-runtime-apt-env:
 load-default-connections load-example-dags
-install-wheels no-rbac-ui
+install-packages-from-dist no-rbac-ui package-format:
 test-type:
 preserve-volumes
 "
diff --git a/docker-context-files/README.md b/docker-context-files/README.md
index 52213cb..07a6c22 100644
--- a/docker-context-files/README.md
+++ b/docker-context-files/README.md
@@ -26,7 +26,7 @@ th [docker-context-files](.) folder to the image context - in case of production
 the build segment, co content of the folder is available in the `/docker-context-file` folder inside
 the build image. You can store constraint files and wheel
 packages there that you want to install as PYPI packages and refer to those packages using
-`--constraint-location` flag for constraints or by using `--add-local-pip-wheels` flag.
+`--constraint-location` flag for constraints or by using `--install-from-local-files-when-building` flag.
 
 By default, the content of this folder is .gitignored so that any binaries and files you put here are only
 used for local builds and not committed to the repository.
diff --git a/scripts/ci/docker-compose/files.yml b/scripts/ci/build_airflow/ci_build_airflow_package.sh
old mode 100644
new mode 100755
similarity index 67%
copy from scripts/ci/docker-compose/files.yml
copy to scripts/ci/build_airflow/ci_build_airflow_package.sh
index 5625ca6..593d94a
--- a/scripts/ci/docker-compose/files.yml
+++ b/scripts/ci/build_airflow/ci_build_airflow_package.sh
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -14,9 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
----
-version: "2.2"
-services:
-  airflow:
-    volumes:
-      - ../../../files:/files:cached
+# shellcheck source=scripts/ci/libraries/_script_init.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+
+build_airflow_packages::build_airflow_packages
+
+cd "${AIRFLOW_SOURCES}/dist" || exit 1
+
+dump_file="/tmp/airflow_$(date +"%Y%m%d-%H%M%S").tar.gz"
+tar -cvzf "${dump_file}" .
+
+echo "Airflow is in dist and also tar-gzipped in ${dump_file}"
diff --git a/scripts/ci/docker-compose/files.yml b/scripts/ci/docker-compose/files.yml
index 5625ca6..2925bd9 100644
--- a/scripts/ci/docker-compose/files.yml
+++ b/scripts/ci/docker-compose/files.yml
@@ -20,3 +20,4 @@ services:
   airflow:
     volumes:
       - ../../../files:/files:cached
+      - ../../../dist:/dist:cached
diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml
index 20ce4d3..1fb6200 100644
--- a/scripts/ci/docker-compose/local.yml
+++ b/scripts/ci/docker-compose/local.yml
@@ -43,7 +43,6 @@ services:
       - ../../../dags:/opt/airflow/dags:cached
       - ../../../dev:/opt/airflow/dev:cached
       - ../../../docs:/opt/airflow/docs:cached
-      - ../../../dist:/dist:cached
       - ../../../hooks:/opt/airflow/hooks:cached
       - ../../../logs:/root/airflow/logs:cached
       - ../../../pytest.ini:/opt/airflow/pytest.ini:cached
diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index 700487c..43defdf 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -23,6 +23,8 @@ export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
 export DOCKER_CACHE="local"
 export VERBOSE="true"
 
+export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
+readonly INSTALLED_EXTRAS
 
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh
index 30f9def..ec529b4 100755
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/images/ci_verify_prod_image.sh
@@ -62,7 +62,7 @@ function verify_prod_image_dependencies {
     docker run --rm --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" -c 'pip check'
     local res=$?
     if [[ ${res} != "0" ]]; then
-        echo  "${COLOR_RED_ERROR} ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.  ${COLOR_RESET}"
+        echo "${COLOR_RED_ERROR} ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.  ${COLOR_RESET}"
         echo
         build_images::inform_about_pip_check "--production "
         # TODO(potiuk) - enable the comment once https://github.com/apache/airflow/pull/12188 is merged
diff --git a/scripts/ci/libraries/_all_libs.sh b/scripts/ci/libraries/_all_libs.sh
index 43e2e23..d676e77 100755
--- a/scripts/ci/libraries/_all_libs.sh
+++ b/scripts/ci/libraries/_all_libs.sh
@@ -32,6 +32,8 @@ readonly SCRIPTS_CI_DIR
 . "${LIBRARIES_DIR}"/_repeats.sh
 # shellcheck source=scripts/ci/libraries/_sanity_checks.sh
 . "${LIBRARIES_DIR}"/_sanity_checks.sh
+# shellcheck source=scripts/ci/libraries/_build_airflow_packages.sh
+. "${LIBRARIES_DIR}"/_build_airflow_packages.sh
 # shellcheck source=scripts/ci/libraries/_build_images.sh
 . "${LIBRARIES_DIR}"/_build_images.sh
 # shellcheck source=scripts/ci/libraries/_kind.sh
diff --git a/scripts/in_container/entrypoint_exec.sh b/scripts/ci/libraries/_build_airflow_packages.sh
old mode 100755
new mode 100644
similarity index 54%
copy from scripts/in_container/entrypoint_exec.sh
copy to scripts/ci/libraries/_build_airflow_packages.sh
index 728872b..3ec1bca
--- a/scripts/in_container/entrypoint_exec.sh
+++ b/scripts/ci/libraries/_build_airflow_packages.sh
@@ -16,16 +16,30 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
-. /opt/airflow/scripts/in_container/_in_container_script_init.sh
+# Build airflow packages
+function build_airflow_packages::build_airflow_packages() {
+    rm -rf -- *egg-info*
+    rm -rf -- build
 
-# shellcheck source=scripts/in_container/configure_environment.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"
+    pip install --upgrade "pip==${PIP_VERSION}" "wheel==${WHEEL_VERSION}"
 
-# shellcheck source=scripts/in_container/run_init_script.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/run_init_script.sh"
+    local packages=()
 
-# shellcheck source=scripts/in_container/run_tmux.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/run_tmux.sh"
+    if [[ ${PACKAGE_FORMAT} == "wheel" || ${PACKAGE_FORMAT} == "both" ]] ; then
+        packages+=("bdist_wheel")
+    fi
+    if [[ ${PACKAGE_FORMAT} == "sdist" || ${PACKAGE_FORMAT} == "both" ]] ; then
+        packages+=("sdist")
+    fi
 
-exec /bin/bash "${@}"
+    # Prepare airflow's wheel
+    python setup.py compile_assets "${packages[@]}"
+
+    # clean-up
+    rm -rf -- *egg-info*
+    rm -rf -- build
+
+    echo
+    echo "Airflow package prepared: ${PACKAGE_FORMAT}"
+    echo
+}
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 50cdbf0..40437dd 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -126,9 +126,9 @@ function initialization::initialize_base_variables() {
     # If set to true, RBAC UI will not be used for 1.10 version
     export DISABLE_RBAC=${DISABLE_RBAC:="false"}
 
-    # if set to true, the ci image will look for wheel packages in dist folder and will install them
+    # if set to true, the ci image will look for packages in dist folder and will install them
     # during entering the container
-    export INSTALL_WHEELS=${INSTALL_WHEELS:="false"}
+    export INSTALL_PACKAGES_FROM_DIST=${INSTALL_PACKAGES_FROM_DIST:="false"}
 
     # If set the specified file will be used to initialize Airflow after the environment is created,
     # otherwise it will use files/airflow-breeze-config/init.sh
@@ -235,9 +235,6 @@ function initialization::initialize_mount_variables() {
     # Whether necessary for airflow run local sources are mounted to docker
     export MOUNT_LOCAL_SOURCES=${MOUNT_LOCAL_SOURCES:="true"}
 
-    # Whether files folder from local sources are mounted to docker
-    export MOUNT_FILES=${MOUNT_FILES:="true"}
-
     if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then
         verbosity::print_info
         verbosity::print_info "Mounting necessary host volumes to Docker"
@@ -249,14 +246,9 @@ function initialization::initialize_mount_variables() {
         verbosity::print_info
     fi
 
-    if [[ ${MOUNT_FILES} == "true" ]]; then
-        verbosity::print_info
-        verbosity::print_info "Mounting files folder to Docker"
-        verbosity::print_info
-        EXTRA_DOCKER_FLAGS+=("-v" "${AIRFLOW_SOURCES}/files:/files")
-    fi
-
     EXTRA_DOCKER_FLAGS+=(
+        "-v" "${AIRFLOW_SOURCES}/files:/files"
+        "-v" "${AIRFLOW_SOURCES}/dist:/dist"
         "--rm"
         "--env-file" "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env"
     )
@@ -383,10 +375,10 @@ function initialization::initialize_image_build_variables() {
 
     # whether installation of Airflow should be done via PIP. You can set it to false if you have
     # all the binary packages (including airflow) in the docker-context-files folder and use
-    # AIRFLOW_LOCAL_PIP_WHEELS="true" to install it from there.
-    export INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP:="true"}"
+    # INSTALL_FROM_DOCKER_CONTEXT_FILES="true" to install it from there.
+    export INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI:="true"}"
     # whether installation should be performed from the local wheel packages in "docker-context-files" folder
-    export AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS:="false"}"
+    export INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES:="false"}"
     # reference to CONSTRAINTS. they can be overwritten manually or replaced with AIRFLOW_CONSTRAINTS_LOCATION
     export AIRFLOW_CONSTRAINTS_REFERENCE="${AIRFLOW_CONSTRAINTS_REFERENCE:=""}"
     # direct constraints Location - can be URL or path to local file. If empty, it will be calculated
diff --git a/scripts/ci/libraries/_local_mounts.sh b/scripts/ci/libraries/_local_mounts.sh
index 39790c9..0ccd250 100644
--- a/scripts/ci/libraries/_local_mounts.sh
+++ b/scripts/ci/libraries/_local_mounts.sh
@@ -39,7 +39,6 @@ function local_mounts::generate_local_mounts_list {
         "$prefix"dags:/opt/airflow/dags:cached
         "$prefix"dev:/opt/airflow/dev:cached
         "$prefix"docs:/opt/airflow/docs:cached
-        "$prefix"dist:/dist:cached
         "$prefix"hooks:/opt/airflow/hooks:cached
         "$prefix"logs:/root/airflow/logs:cached
         "$prefix"pytest.ini:/opt/airflow/pytest.ini:cached
diff --git a/scripts/ci/libraries/_runs.sh b/scripts/ci/libraries/_runs.sh
index 7b4e51e..17ab1ee 100644
--- a/scripts/ci/libraries/_runs.sh
+++ b/scripts/ci/libraries/_runs.sh
@@ -19,9 +19,10 @@
 # Docker command to build documentation
 function runs::run_docs() {
     docker run "${EXTRA_DOCKER_FLAGS[@]}" -t \
-            --entrypoint "/usr/local/bin/dumb-init"  \
-            "${AIRFLOW_CI_IMAGE}" \
-            "--" "/opt/airflow/scripts/in_container/run_docs_build.sh" "${@}"
+        -e "GITHUB_ACTIONS=${GITHUB_ACTIONS="false"}" \
+        --entrypoint "/usr/local/bin/dumb-init"  \
+        "${AIRFLOW_CI_IMAGE}" \
+        "--" "/opt/airflow/scripts/in_container/run_docs_build.sh" "${@}"
 }
 
 
@@ -32,3 +33,13 @@ function runs::run_generate_constraints() {
         "${AIRFLOW_CI_IMAGE}" \
         "--" "/opt/airflow/scripts/in_container/run_generate_constraints.sh"
 }
+
+# Docker command to prepare provider packages
+function runs::run_prepare_airflow_packages() {
+    docker run "${EXTRA_DOCKER_FLAGS[@]}" \
+        --entrypoint "/usr/local/bin/dumb-init"  \
+        -t \
+        -v "${AIRFLOW_SOURCES}:/opt/airflow" \
+        "${AIRFLOW_CI_IMAGE}" \
+        "--" "/opt/airflow/scripts/in_container/run_prepare_airflow_packages.sh" "${@}"
+}
diff --git a/scripts/ci/testing/ci_run_airflow_testing.sh b/scripts/ci/testing/ci_run_airflow_testing.sh
index 39f1501..8bc6169 100755
--- a/scripts/ci/testing/ci_run_airflow_testing.sh
+++ b/scripts/ci/testing/ci_run_airflow_testing.sh
@@ -103,15 +103,12 @@ build_images::rebuild_ci_image_if_needed
 
 initialization::set_mysql_encoding
 
-DOCKER_COMPOSE_LOCAL=()
+DOCKER_COMPOSE_LOCAL=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml")
 
 if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then
     DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml")
 fi
 
-if [[ ${MOUNT_FILES} == "true" ]]; then
-    DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml")
-fi
 
 if [[ ${GITHUB_ACTIONS} == "true" ]]; then
     DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ga.yml")
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 1dbcdd4..f1a55c8 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -260,22 +260,6 @@ function uninstall_airflow() {
     find /root/airflow/ -type f -print0 | xargs -0 rm -f --
 }
 
-function uninstall_providers() {
-    echo
-    echo "Uninstalling all provider packages"
-    echo
-    local provider_packages_to_uninstall
-    provider_packages_to_uninstall=$(pip freeze | grep apache-airflow-providers || true)
-    if [[ -n ${provider_packages_to_uninstall} ]]; then
-        echo "${provider_packages_to_uninstall}" | xargs pip uninstall -y || true 2>/dev/null
-    fi
-}
-
-function uninstall_airflow_and_providers() {
-    uninstall_providers
-    uninstall_airflow
-}
-
 function install_released_airflow_version() {
     local version="${1}"
     local extras="${2}"
diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh
index 9ab400a..258036f 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -20,13 +20,14 @@ EXIT_CODE=0
 
 DISABLED_INTEGRATIONS=""
 
-function check_service() {
-    INTEGRATION_NAME=$1
+function check_service {
+    LABEL=$1
     CALL=$2
     MAX_CHECK=${3:=1}
 
-    echo -n "${INTEGRATION_NAME}: "
-    while true; do
+    echo -n "${LABEL}: "
+    while true
+    do
         set +e
         LAST_CHECK_RESULT=$(eval "${CALL}" 2>&1)
         RES=$?
@@ -36,7 +37,7 @@ function check_service() {
             break
         else
             echo -n "."
-            MAX_CHECK=$((MAX_CHECK - 1))
+            MAX_CHECK=$((MAX_CHECK-1))
         fi
         if [[ ${MAX_CHECK} == 0 ]]; then
             echo "${COLOR_RED_ERROR} Maximum number of retries while checking service. Exiting ${COLOR_RESET}"
@@ -55,8 +56,11 @@ function check_service() {
     fi
 }
 
-function check_integration() {
-    INTEGRATION_NAME=$1
+function check_integration {
+    INTEGRATION_LABEL=$1
+    INTEGRATION_NAME=$2
+    CALL=$3
+    MAX_CHECK=${4:=1}
 
     ENV_VAR_NAME=INTEGRATION_${INTEGRATION_NAME^^}
     if [[ ${!ENV_VAR_NAME:=} != "true" ]]; then
@@ -65,16 +69,16 @@ function check_integration() {
         fi
         return
     fi
-    check_service "${@}"
+    check_service "${INTEGRATION_LABEL}" "${CALL}" "${MAX_CHECK}"
 }
 
-function check_db_backend() {
+function check_db_backend {
     MAX_CHECK=${1:=1}
 
     if [[ ${BACKEND} == "postgres" ]]; then
-        check_service "postgres" "nc -zvv postgres 5432" "${MAX_CHECK}"
+        check_service "PostgreSQL" "nc -zvv postgres 5432" "${MAX_CHECK}"
     elif [[ ${BACKEND} == "mysql" ]]; then
-        check_service "mysql" "nc -zvv mysql 3306" "${MAX_CHECK}"
+        check_service "MySQL" "nc -zvv mysql 3306" "${MAX_CHECK}"
     elif [[ ${BACKEND} == "sqlite" ]]; then
         return
     else
@@ -119,7 +123,8 @@ function startairflow_if_requested() {
             airflow initdb
             airflow create_user -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email || true
         else
-            airflow create_user -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
+            airflow db init
+            airflow users create -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
         fi
 
         # shellcheck source=scripts/in_container/run_init_script.sh
@@ -136,13 +141,14 @@ if [[ -n ${BACKEND=} ]]; then
     check_db_backend 20
     echo "-----------------------------------------------------------------------------------------------"
 fi
-check_integration kerberos "nc -zvv kerberos 88" 30
-check_integration mongo "nc -zvv mongo 27017" 20
-check_integration redis "nc -zvv redis 6379" 20
-check_integration rabbitmq "nc -zvv rabbitmq 5672" 20
-check_integration cassandra "nc -zvv cassandra 9042" 20
-check_integration openldap "nc -zvv openldap 389" 20
-check_integration presto "nc -zvv presto 8080" 40
+check_integration "Kerberos" "kerberos" "nc -zvv kerberos 88" 30
+check_integration "MongoDB" "mongo" "nc -zvv mongo 27017" 20
+check_integration "Redis" "redis" "nc -zvv redis 6379" 20
+check_integration "RabbitMQ" "rabbitmq" "nc -zvv rabbitmq 5672" 20
+check_integration "Cassandra" "cassandra" "nc -zvv cassandra 9042" 20
+check_integration "OpenLDAP" "openldap" "nc -zvv openldap 389" 20
+check_integration "Presto (API)" "presto" \
+    "curl --max-time 1 http://presto:8080/v1/info/ | grep '\"starting\":false'" 20
 echo "-----------------------------------------------------------------------------------------------"
 
 if [[ ${EXIT_CODE} != 0 ]]; then
@@ -165,5 +171,3 @@ if [[ -n ${DISABLED_INTEGRATIONS=} ]]; then
     echo "Enable them via --integration <INTEGRATION_NAME> flags (you can use 'all' for all)"
     echo
 fi
-
-exit 0
diff --git a/scripts/in_container/configure_environment.sh b/scripts/in_container/configure_environment.sh
index 3fe7858..01c3487 100644
--- a/scripts/in_container/configure_environment.sh
+++ b/scripts/in_container/configure_environment.sh
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 # Script to check licences for all code. Can be started from any working directory
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
 export FILES_DIR="/files"
 export AIRFLOW_BREEZE_CONFIG_DIR="${FILES_DIR}/airflow-breeze-config"
 VARIABLES_ENV_FILE="variables.env"
diff --git a/scripts/in_container/entrypoint_exec.sh b/scripts/in_container/entrypoint_exec.sh
index 728872b..4423810 100755
--- a/scripts/in_container/entrypoint_exec.sh
+++ b/scripts/in_container/entrypoint_exec.sh
@@ -15,7 +15,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 # shellcheck source=scripts/in_container/_in_container_script_init.sh
 . /opt/airflow/scripts/in_container/_in_container_script_init.sh
 
diff --git a/tests/bats/breeze/test_breeze_complete.bats b/tests/bats/breeze/test_breeze_complete.bats
index 2e9d1da..3ca32ce 100644
--- a/tests/bats/breeze/test_breeze_complete.bats
+++ b/tests/bats/breeze/test_breeze_complete.bats
@@ -272,3 +272,11 @@
 
   assert_equal "" "${TEST_TYPE}"
 }
+
+@test "Test default package format is wheel" {
+  load ../bats_utils
+  #shellcheck source=breeze-complete
+  source "${AIRFLOW_SOURCES}/breeze-complete"
+
+  assert_equal "wheel" "${PACKAGE_FORMAT}"
+}


[airflow] 08/44: Artifacts in GitHub Actions have a short retention period (#12793)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0c21aa15bfd58fc6ca57afe5cfebdbf24604a3d5
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Fri Dec 4 08:37:38 2020 +0100

    Artifacts in GitHub Actions have a short retention period (#12793)
    
    (cherry picked from commit fa4fc37b966b331377257fe964824914524dba96)
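
    With retention-days set per upload, expiry can be confirmed through the
    GitHub Actions API; a hypothetical check with the gh CLI (the repository
    name and jq filter are illustrative):

        # List workflow artifacts with their expiry timestamps.
        gh api repos/apache/airflow/actions/artifacts \
            --jq '.artifacts[] | [.name, .expires_at] | @tsv'
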
---
 .github/workflows/ci.yml                    | 19 ++++++++++++++++++
 .github/workflows/delete_old_artifacts.yml  | 31 -----------------------------
 .github/workflows/scheduled_quarantined.yml |  2 ++
 3 files changed, 21 insertions(+), 31 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9655955..44ffc7b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -323,18 +323,21 @@ jobs:
         with:
           name: airflow-logs-helm
           path: "./files/airflow_logs*"
+          retention-days: 7
       - name: "Upload container logs"
         uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: container-logs-helm
           path: "./files/container_logs*"
+          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: >
             coverage-helm
           path: "./files/coverage.xml"
+          retention-days: 7
 
   tests-postgres:
     timeout-minutes: 80
@@ -376,18 +379,21 @@ jobs:
         with:
           name: airflow-logs-${{matrix.python-version}}-${{matrix.postgres-version}}
           path: "./files/airflow_logs*"
+          retention-days: 7
       - name: "Upload container logs"
         uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: container-logs-postgres-${{matrix.python-version}}-${{matrix.postgres-version}}
           path: "./files/container_logs*"
+          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: >
             coverage-postgres-${{matrix.python-version}}-${{matrix.postgres-version}}
           path: "./files/coverage.xml"
+          retention-days: 7
 
   tests-mysql:
     timeout-minutes: 80
@@ -428,17 +434,20 @@ jobs:
         with:
           name: airflow-logs-${{matrix.python-version}}-${{matrix.mysql-version}}
           path: "./files/airflow_logs*"
+          retention-days: 7
       - name: "Upload container logs"
         uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: container-logs-mysql-${{matrix.python-version}}-${{matrix.mysql-version}}
           path: "./files/container_logs*"
+          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: coverage-mysql-${{matrix.python-version}}-${{matrix.mysql-version}}
           path: "./files/coverage.xml"
+          retention-days: 7
 
   tests-sqlite:
     timeout-minutes: 60
@@ -477,17 +486,20 @@ jobs:
         with:
           name: airflow-logs-${{matrix.python-version}}
           path: './files/airflow_logs*'
+          retention-days: 7
       - name: "Upload container logs"
         uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: container-logs-sqlite-${{matrix.python-version}}
           path: "./files/container_logs*"
+          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: coverage-sqlite-${{matrix.python-version}}
           path: ./files/coverage.xml
+          retention-days: 7
 
   tests-quarantined:
     timeout-minutes: 60
@@ -543,23 +555,27 @@ jobs:
         with:
           name: quarantined_tests
           path: "files/test_result.xml"
+          retention-days: 7
       - name: "Upload airflow logs"
         uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: airflow-logs-quarantined-${{ matrix.backend }}
           path: "./files/airflow_logs*"
+          retention-days: 7
       - name: "Upload container logs"
         uses: actions/upload-artifact@v2
         if: failure()
         with:
           name: container-logs-quarantined-${{ matrix.backend }}
           path: "./files/container_logs*"
+          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: coverage-quarantined-${{ matrix.backend }}
           path: "./files/coverage.xml"
+          retention-days: 7
 
   upload-coverage:
     timeout-minutes: 5
@@ -705,12 +721,14 @@ jobs:
           name: >
             kind-logs-${{matrix.kubernetes-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}}
           path: /tmp/kind_logs_*
+          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: >
             coverage-k8s-${{matrix.kubernetes-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}}
           path: "./files/coverage.xml"
+          retention-days: 7
 
   push-prod-images-to-github-registry:
     timeout-minutes: 10
@@ -818,6 +836,7 @@ jobs:
         with:
           name: 'constraints-${{matrix.python-version}}'
           path: './files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt'
+          retention-days: 7
 
   constraints-push:
     timeout-minutes: 10
diff --git a/.github/workflows/delete_old_artifacts.yml b/.github/workflows/delete_old_artifacts.yml
deleted file mode 100644
index 98329d5..0000000
--- a/.github/workflows/delete_old_artifacts.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
----
-name: 'Delete old artifacts'
-on:  # yamllint disable-line rule:truthy
-  schedule:
-    - cron: '27 */6 * * *'  # run every 6 hours
-
-jobs:
-  delete-artifacts:
-    runs-on: ubuntu-20.04
-    steps:
-      - uses: kolpav/purge-artifacts-action@04c636a505f26ebc82f8d070b202fb87ff572b10  # v1.0
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          expire-in: 7days  # Setting this to 0 will delete all artifacts
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index cf29c38..14dc91d 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -107,9 +107,11 @@ jobs:
         with:
           name: 'quarantined_tests'
           path: 'files/test_result.xml'
+          retention-days: 7
       - uses: actions/upload-artifact@v2
         name: Upload airflow logs
         if: always()
         with:
           name: airflow-logs-quarantined-${{matrix.python-version}}-${{ matrix.postgres-version }}
           path: './files/airflow_logs*'
+          retention-days: 7


[airflow] 24/44: fixup! fixup! Install airflow and providers from dist and verifies them (#13033)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit a1584c253830934833eccf996b6804934c192ba2
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Tue Dec 15 15:18:45 2020 +0100

    fixup! fixup! Install airflow and providers from dist and verifies them  (#13033)
---
 scripts/in_container/check_environment.sh     | 6 +-----
 scripts/in_container/configure_environment.sh | 2 --
 scripts/in_container/entrypoint_exec.sh       | 4 ++++
 3 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh
index 2282304..9ab400a 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -16,9 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 # Script to check licences for all code. Can be started from any working directory
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
-. "$(dirname "${BASH_SOURCE[0]}")/_in_container_script_init.sh"
-
 EXIT_CODE=0
 
 DISABLED_INTEGRATIONS=""
@@ -117,8 +114,6 @@ function startairflow_if_requested() {
         export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
         export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
 
-        . "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"
-
         # initialize db and create the admin user if it's a new run
         if [[ ${RUN_AIRFLOW_1_10} == "true" ]]; then
             airflow initdb
@@ -127,6 +122,7 @@ function startairflow_if_requested() {
             airflow create_user -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
         fi
 
+        # shellcheck source=scripts/in_container/run_init_script.sh
         . "$( dirname "${BASH_SOURCE[0]}" )/run_init_script.sh"
 
     fi
diff --git a/scripts/in_container/configure_environment.sh b/scripts/in_container/configure_environment.sh
index 4ec0d6a..3fe7858 100644
--- a/scripts/in_container/configure_environment.sh
+++ b/scripts/in_container/configure_environment.sh
@@ -17,8 +17,6 @@
 # under the License.
 # Script to check licences for all code. Can be started from any working directory
 # shellcheck source=scripts/in_container/_in_container_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
-
 export FILES_DIR="/files"
 export AIRFLOW_BREEZE_CONFIG_DIR="${FILES_DIR}/airflow-breeze-config"
 VARIABLES_ENV_FILE="variables.env"
diff --git a/scripts/in_container/entrypoint_exec.sh b/scripts/in_container/entrypoint_exec.sh
index bce0bfc..728872b 100755
--- a/scripts/in_container/entrypoint_exec.sh
+++ b/scripts/in_container/entrypoint_exec.sh
@@ -15,6 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
+# shellcheck source=scripts/in_container/_in_container_script_init.sh
+. /opt/airflow/scripts/in_container/_in_container_script_init.sh
+
 # shellcheck source=scripts/in_container/configure_environment.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"
 


[airflow] 37/44: Fix parenthesis preventing Keda ScaledObject creation (#13183)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit b3d566642945374d3e5f2ca985c64120125421d4
Author: dstandish <ds...@users.noreply.github.com>
AuthorDate: Mon Dec 21 02:19:26 2020 -0800

    Fix parenthesis preventing Keda ScaledObject creation (#13183)
    
    (cherry picked from commit a9d562e1c3c16c98750c9e3be74347f882acb97a)
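
    The misplaced parenthesis made the condition require the executor to
    equal both "CeleryExecutor" and "CeleryKubernetesExecutor" at once, so
    the ScaledObject was never rendered. A quick local check of the fixed
    template (standard helm template flags, run from the chart directory):

        # Render only the KEDA scaler; before this fix the output was empty
        # even with KEDA enabled and the CeleryExecutor selected.
        helm template . \
            --set workers.keda.enabled=true \
            --set executor=CeleryExecutor \
            --show-only templates/workers/worker-kedaautoscaler.yaml
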
---
 chart/templates/workers/worker-kedaautoscaler.yaml |  2 +-
 chart/tests/helm_template_generator.py             |  7 +--
 chart/tests/test_keda.py                           | 57 ++++++++++++++++++++++
 3 files changed, 62 insertions(+), 4 deletions(-)

diff --git a/chart/templates/workers/worker-kedaautoscaler.yaml b/chart/templates/workers/worker-kedaautoscaler.yaml
index e135076..1493133 100644
--- a/chart/templates/workers/worker-kedaautoscaler.yaml
+++ b/chart/templates/workers/worker-kedaautoscaler.yaml
@@ -18,7 +18,7 @@
 ################################
 ## Airflow Worker KEDA Scaler
 #################################
-{{- if (and .Values.workers.keda.enabled ( or (eq .Values.executor "CeleryExecutor"))  (eq .Values.executor "CeleryKubernetesExecutor")) }}
+{{- if (and .Values.workers.keda.enabled ( or (eq .Values.executor "CeleryExecutor") (eq .Values.executor "CeleryKubernetesExecutor"))) }}
 apiVersion: keda.k8s.io/v1alpha1
 kind: ScaledObject
 metadata:
diff --git a/chart/tests/helm_template_generator.py b/chart/tests/helm_template_generator.py
index d8e3f49..8b9fdb2 100644
--- a/chart/tests/helm_template_generator.py
+++ b/chart/tests/helm_template_generator.py
@@ -61,7 +61,7 @@ def validate_k8s_object(instance):
     validate.validate(instance)
 
 
-def render_chart(name="RELEASE-NAME", values=None, show_only=None):
+def render_chart(name="RELEASE-NAME", values=None, show_only=None, validate_schema=True):
     """
     Function that renders a helm chart into dictionaries. For helm chart testing only
     """
@@ -77,8 +77,9 @@ def render_chart(name="RELEASE-NAME", values=None, show_only=None):
         templates = subprocess.check_output(command)
         k8s_objects = yaml.load_all(templates)
         k8s_objects = [k8s_object for k8s_object in k8s_objects if k8s_object]  # type: ignore
-        for k8s_object in k8s_objects:
-            validate_k8s_object(k8s_object)
+        if validate_schema:
+            for k8s_object in k8s_objects:
+                validate_k8s_object(k8s_object)
         return k8s_objects
 
 
diff --git a/chart/tests/test_keda.py b/chart/tests/test_keda.py
new file mode 100644
index 0000000..57da31a
--- /dev/null
+++ b/chart/tests/test_keda.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+
+import jmespath
+from parameterized import parameterized
+
+from tests.helm_template_generator import render_chart
+
+
+class KedaTest(unittest.TestCase):
+    def test_keda_disabled_by_default(self):
+        """disabled by default"""
+        docs = render_chart(
+            values={},
+            show_only=["templates/workers/worker-kedaautoscaler.yaml"],
+            validate_schema=False,
+        )
+        self.assertListEqual(docs, [])
+
+    @parameterized.expand(
+        [
+            ('SequentialExecutor', False),
+            ('CeleryExecutor', True),
+            ('CeleryKubernetesExecutor', True),
+        ]
+    )
+    def test_keda_enabled(self, executor, is_created):
+        """
+        ScaledObject should only be created when set to enabled and executor is Celery or CeleryKubernetes
+        """
+        docs = render_chart(
+            values={
+                "workers": {"keda": {"enabled": True}, "persistence": {"enabled": False}},
+                'executor': executor,
+            },
+            show_only=["templates/workers/worker-kedaautoscaler.yaml"],
+            validate_schema=False,
+        )
+        if is_created:
+            self.assertEqual("RELEASE-NAME-worker", jmespath.search("metadata.name", docs[0]))
+        else:
+            self.assertListEqual(docs, [])
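
For the record, the bug itself is a Go-template grouping issue: in the old condition
the inner "or" wrapped only a single comparison, so the expression collapsed to
"and <keda enabled> <executor is CeleryExecutor> <executor is CeleryKubernetesExecutor>",
which can never be true because the executor cannot equal both values at once, and the
ScaledObject was therefore never rendered. With the fix, either executor value satisfies
the "or". A quick local check (a sketch, assuming helm 3 and the chart directory as the
working directory; this is not part of the commit):

    helm template . \
        --set workers.keda.enabled=true \
        --set executor=CeleryExecutor \
        --show-only templates/workers/worker-kedaautoscaler.yaml
    # Before the fix helm finds no rendered manifest for this template;
    # after the fix the ScaledObject appears in the output.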


[airflow] 38/44: Update chart readme to remove astronomer references (#13210)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 126c5ea2e1ce3186a8e2ac582ec6c9786ff787df
Author: dstandish <ds...@users.noreply.github.com>
AuthorDate: Mon Dec 21 05:12:19 2020 -0800

    Update chart readme to remove astronomer references (#13210)
    
    (cherry picked from commit a3cc78dc16bf228901ba64e65d202d4685bf4fc7)
---
 chart/README.md | 39 +++++++++++++++++++++++----------------
 1 file changed, 23 insertions(+), 16 deletions(-)

diff --git a/chart/README.md b/chart/README.md
index c5106be..7a7c81b 100644
--- a/chart/README.md
+++ b/chart/README.md
@@ -286,18 +286,12 @@ Confirm it's up:
 kubectl cluster-info --context kind-kind
 ```
 
-**Add Astronomer's Helm repo:**
-
-```
-helm repo add astronomer https://helm.astronomer.io
-helm repo update
-```
 
 **Create namespace + install the chart:**
 
 ```
 kubectl create namespace airflow
-helm install airflow --n airflow astronomer/airflow
+helm install airflow -n airflow .
 ```
 
 It may take a few minutes. Confirm the pods are up:
@@ -312,25 +306,38 @@ to port-forward the Airflow UI to http://localhost:8080/ to confirm Airflow is w
 
 **Build a Docker image from your DAGs:**
 
-1. Start a project using [astro-cli](https://github.com/astronomer/astro-cli), which will generate a Dockerfile, and load your DAGs in. You can test locally before pushing to kind with `astro airflow start`.
+1. Create a project
 
-        mkdir my-airflow-project && cd my-airflow-project
-        astro dev init
+    ```shell script
+    mkdir my-airflow-project && cd my-airflow-project
+    mkdir dags  # put dags here
+    cat <<EOM > Dockerfile
+    FROM apache/airflow
+    COPY . .
+    EOM
+    ```
 
 2. Then build the image:
 
-        docker build -t my-dags:0.0.1 .
+    ```shell script
+    docker build -t my-dags:0.0.1 .
+    ```
 
 3. Load the image into kind:
 
-        kind load docker-image my-dags:0.0.1
+    ```shell script
+    kind load docker-image my-dags:0.0.1
+    ```
 
 4. Upgrade Helm deployment:
 
-        helm upgrade airflow -n airflow \
-            --set images.airflow.repository=my-dags \
-            --set images.airflow.tag=0.0.1 \
-            astronomer/airflow
+    ```shell script
+    # from airflow chart directory
+    helm upgrade airflow -n airflow \
+        --set images.airflow.repository=my-dags \
+        --set images.airflow.tag=0.0.1 \
+        .
+    ```
 
 ## Contributing
 

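For completeness, the port-forward step referenced in the README can be done with
kubectl (a sketch; the service name assumes the release is installed as "airflow" and
follows the chart's "<release>-webserver" naming, so verify it with
"kubectl get svc -n airflow" first):

    kubectl port-forward svc/airflow-webserver 8080:8080 --namespace airflow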

[airflow] 35/44: The default value in chart should be 2.0.0 (#13125)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 08bb392204a79dbd789ff85afeb47527545b5b35
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Fri Dec 18 11:50:23 2020 +0100

    The default value in chart should be 2.0.0 (#13125)
    
    (cherry picked from commit f5c4b2442d096ccb873d18f50bc2c5d89d780b03)
---
 chart/values.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/chart/values.yaml b/chart/values.yaml
index 091a0c9..cb605c3 100644
--- a/chart/values.yaml
+++ b/chart/values.yaml
@@ -31,7 +31,7 @@ airflowHome: "/opt/airflow"
 defaultAirflowRepository: apache/airflow
 
 # Default airflow tag to deploy
-defaultAirflowTag: 1.10.12
+defaultAirflowTag: 2.0.0
 
 
 # Select certain nodes for airflow pods.
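
Users tracking the 1.10 line can still point the chart at a 1.10 image by overriding the
value changed above at install time (a sketch; the tag is only an example and must match
an existing image tag on Docker Hub):

    helm install airflow -n airflow . --set defaultAirflowTag=1.10.14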


[airflow] 01/44: Bugfix: Unable to import Airflow plugins on Python 3.8 (#12859)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 5d642a34ae279a6f397cee0d0f470bc61af2cbc4
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Fri Dec 11 15:28:13 2020 +0000

    Bugfix: Unable to import Airflow plugins on Python 3.8 (#12859)
    
    closes https://github.com/apache/airflow/issues/13019
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 2b6e00d..d0bd0ba 100644
--- a/setup.py
+++ b/setup.py
@@ -622,7 +622,7 @@ INSTALL_REQUIREMENTS = [
     'future>=0.16.0, <0.19',
     'graphviz>=0.12',
     'gunicorn>=19.5.0, <21.0',
-    'importlib-metadata~=2.0; python_version<"3.8"',
+    'importlib-metadata~=2.0; python_version<"3.9"',
     'importlib_resources~=1.4',
     'iso8601>=0.1.12',
     'jinja2>=2.10.1, <2.12.0',
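
The fix works through the PEP 508 environment marker: with python_version<"3.8",
installs on Python 3.8 received no pin at all, leaving room for an incompatible newer
importlib-metadata release to be pulled in by other dependencies; widening the marker to
<"3.9" applies the ~=2.0 pin on 3.8 as well. A quick way to check what the marker now
resolves to (a sketch; the exact version printed will vary with your environment):

    # pip accepts PEP 508 markers on the command line, mirroring the setup.py entry:
    python3.8 -m pip install 'importlib-metadata~=2.0; python_version<"3.9"'
    python3.8 -c 'import importlib_metadata as m; print(m.version("importlib-metadata"))'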


[airflow] 30/44: Include airflow/contrib/executors in the dist package

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 81f34e8f51e318c4ca62a5c23feb411a58ad7c79
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Tue Dec 22 15:18:58 2020 +0000

    Include airflow/contrib/executors in the dist package
    
    The __init__.py was removed by mistake in
     https://github.com/apache/airflow/commit/3437663676f287b84e9c3fa05ed404e4adf34739#diff-db9046a3e4e52642aac8677be5e7f72fc19aa5612001a8f6bd5586c181dddfb6
---
 airflow/contrib/executors/__init__.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/airflow/contrib/executors/__init__.py b/airflow/contrib/executors/__init__.py
new file mode 100644
index 0000000..114d189
--- /dev/null
+++ b/airflow/contrib/executors/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
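
Without the __init__.py, setuptools treats airflow/contrib/executors as a plain
directory rather than a package, so the released artifacts shipped without it and
imports such as airflow.contrib.executors.kubernetes_executor failed. One way to confirm
the package is back in the sdist (a sketch; the archive name depends on the version
being built):

    python setup.py sdist
    tar -tzf dist/apache-airflow-*.tar.gz | grep 'contrib/executors/__init__.py'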


[airflow] 34/44: fixup! Include airflow/contrib/executors in the dist package

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4e9c93f1f9650959ad49151d00ab9fae1a2ff0c2
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 23 19:50:02 2020 +0100

    fixup! Include airflow/contrib/executors in the dist package
---
 docs/conf.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/conf.py b/docs/conf.py
index 0c390ca..1728274 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -192,6 +192,7 @@ exclude_patterns = [
     '_api/airflow/configuration',
     '_api/airflow/contrib/auth',
     '_api/airflow/contrib/example_dags',
+    '_api/airflow/contrib/executors/index.rst',
     '_api/airflow/contrib/index.rst',
     '_api/airflow/contrib/kubernetes',
     '_api/airflow/contrib/task_runner',


[airflow] 07/44: Fix chart jobs delete policy for improved idempotency (#12646)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 70c5a8ab9997c9e9feaa676573462dca3779eae4
Author: Florent Chehab <fc...@meilleursagents.com>
AuthorDate: Tue Dec 1 19:24:42 2020 +0100

    Fix chart jobs delete policy for improved idempotency (#12646)
    
    The chart has two jobs (migrate-database & create-user).
    These jobs are run post-install and post-upgrade and only deleted on success.
    
    So if, for some reason (quick reinstall / upgrade), the job fails or is stuck, then helm
    will fail because the job already exists.
    
    This commit sets the `helm.sh/hook-delete-policy` to `before-hook-creation,hook-succeeded`
    so helm will always delete the jobs before creating them again.
    
    (cherry picked from commit a697c588c43450a9b69e607b052c17a27bd5ac0e)
---
 chart/templates/create-user-job.yaml      | 2 +-
 chart/templates/migrate-database-job.yaml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/chart/templates/create-user-job.yaml b/chart/templates/create-user-job.yaml
index 4df7dd6..9ae0ee7 100644
--- a/chart/templates/create-user-job.yaml
+++ b/chart/templates/create-user-job.yaml
@@ -35,7 +35,7 @@ metadata:
   annotations:
     "helm.sh/hook": post-install
     "helm.sh/hook-weight": "2"
-    "helm.sh/hook-delete-policy": hook-succeeded
+    "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded
 spec:
   template:
     metadata:
diff --git a/chart/templates/migrate-database-job.yaml b/chart/templates/migrate-database-job.yaml
index 8639648..fb30498 100644
--- a/chart/templates/migrate-database-job.yaml
+++ b/chart/templates/migrate-database-job.yaml
@@ -34,7 +34,7 @@ metadata:
   annotations:
     "helm.sh/hook": post-install,post-upgrade
     "helm.sh/hook-weight": "1"
-    "helm.sh/hook-delete-policy": hook-succeeded
+    "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded
 spec:
   template:
     metadata:
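
With before-hook-creation in place, a leftover job from a failed or interrupted release
no longer blocks the next upgrade, because helm deletes the old hook resource before
creating it again. A way to observe this (a sketch; assumes both the release and the
namespace are named "airflow"):

    helm upgrade airflow -n airflow .
    kubectl get jobs -n airflow   # stale create-user/migrate-database jobs are replaced, not left to collide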


[airflow] 23/44: fixup! Install airflow and providers from dist and verifies them (#13033)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 16c4ebd6724497c635d86cd1e91d1be06bf0cd0b
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Dec 14 00:57:24 2020 +0100

    fixup! Install airflow and providers from dist and verifies them  (#13033)
---
 scripts/ci/libraries/_build_images.sh | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index d0e0213..c00d96e 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -912,13 +912,13 @@ function build_images::build_prod_images_from_packages() {
     rm -f "${AIRFLOW_SOURCES}/dist/"apache_airflow*.whl
     rm -f "${AIRFLOW_SOURCES}/dist/"apache-airflow*.tar.gz
 
-    # Remove all downloaded apache airflow packages
-    mv -f "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/"
+    # Move all downloaded packages
+    mv -f "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/" || true
 
     # Build apache airflow packages
     build_airflow_packages::build_airflow_packages
 
-    mv "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/"
+    mv -f "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/" || true
     build_images::build_prod_images
 }
 

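The "|| true" matters because these scripts run under "set -e" and the dist/ glob may
match nothing (for example, when no provider packages were downloaded): an unmatched
glob is passed to mv literally, mv exits non-zero, and without the guard the whole image
build would abort. A minimal reproduction (a sketch):

    set -e
    mkdir -p /tmp/empty-dist /tmp/context
    mv -f /tmp/empty-dist/* /tmp/context/ || true   # glob matches nothing, mv fails, script continues
    echo "still running"
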

[airflow] 06/44: User-friendly output of Breeze and CI scripts (#12735)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4fca05e7c184c736cba49bc24123d00acb9cfa4e
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Tue Dec 1 17:44:05 2020 +0100

    User-friendly output of Breeze and CI scripts (#12735)
    
    (cherry picked from commit a02e0f746f85e6ac8786bc1d99bf78994e66b5cf)
---
 breeze                                             | 41 +++++------
 scripts/ci/images/ci_verify_ci_image.sh            |  4 +-
 scripts/ci/images/ci_verify_prod_image.sh          | 15 ++--
 scripts/ci/libraries/_build_images.sh              | 86 ++++++++++++----------
 scripts/ci/libraries/_initialization.sh            | 20 +++++
 scripts/ci/libraries/_kind.sh                      | 30 ++++----
 scripts/ci/libraries/_parameters.sh                | 17 +++--
 scripts/ci/libraries/_push_pull_remove_images.sh   | 38 +++++-----
 scripts/ci/libraries/_repeats.sh                   | 13 ++--
 scripts/ci/libraries/_sanity_checks.sh             | 68 +++++++++--------
 .../ci/pre_commit/pre_commit_check_pre_commits.sh  | 40 +++++-----
 scripts/ci/pre_commit/pre_commit_mermaid.sh        |  6 +-
 scripts/ci/selective_ci_checks.sh                  |  8 +-
 scripts/ci/static_checks/check_license.sh          | 15 ++--
 scripts/in_container/_in_container_script_init.sh  |  6 +-
 scripts/in_container/_in_container_utils.sh        | 51 ++++++++++---
 scripts/in_container/check_environment.sh          |  5 +-
 scripts/in_container/entrypoint_ci.sh              |  6 +-
 scripts/in_container/run_ci_tests.sh               | 83 +++++++++++----------
 scripts/in_container/run_cli_tool.sh               | 20 +++--
 scripts/in_container/run_system_tests.sh           |  2 +
 tests/bats/breeze/test_breeze_params.bats          | 17 +----
 tests/bats/in_container/test_in_container.bats     |  2 +
 23 files changed, 339 insertions(+), 254 deletions(-)

diff --git a/breeze b/breeze
index 6f73ad7..d99ba02 100755
--- a/breeze
+++ b/breeze
@@ -215,11 +215,11 @@ function breeze::initialize_virtualenv() {
     local res=$?
     set -e
     if [[ ${res} != "0" ]]; then
-        echo >&2
-        echo >&2 "ERROR:  Initializing local virtualenv only works when you have virtualenv activated"
-        echo >&2
-        echo >&2 "Please enter your local virtualenv before (for example using 'pyenv activate' or 'workon') "
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} Initializing local virtualenv only works when you have virtualenv activated  ${COLOR_RESET}"
+        echo
+        echo "Please enter your local virtualenv before (for example using 'pyenv activate' or 'workon') "
+        echo
         exit 1
     else
         echo
@@ -1138,9 +1138,9 @@ function breeze::parse_arguments() {
             ;;
         *)
             breeze::flags
-            echo >&2
-            echo >&2 "ERROR: Unknown flag ${1}"
-            echo >&2
+            echo
+            echo  "${COLOR_RED_ERROR} Unknown flag  ${COLOR_RESET}"
+            echo
             exit 1
             ;;
         esac
@@ -1323,9 +1323,9 @@ function breeze::parse_arguments() {
             ;;
         *)
             breeze::usage
-            echo >&2
-            echo >&2 "ERROR: Unknown command ${1}"
-            echo >&2
+            echo
+            echo  "${COLOR_RED_ERROR} Unknown command  ${COLOR_RESET}"
+            echo
             exit 1
             ;;
         esac
@@ -2769,10 +2769,9 @@ function breeze::make_sure_precommit_is_installed() {
     elif command -v pip >/dev/null; then
         pip_binary=pip
     else
-        echo >&2
-        echo >&2 "ERROR: You need to have pip or pip3 in your PATH"
-        echo >&2
-        S
+        echo
+        echo  "${COLOR_RED_ERROR} You need to have pip or pip3 in your PATH  ${COLOR_RESET}"
+        echo
         exit 1
     fi
     "${pip_binary}" install --upgrade pre-commit >/dev/null 2>&1
@@ -2934,9 +2933,9 @@ function breeze::run_build_command() {
         fi
         ;;
     *)
-        echo >&2
-        echo >&2 "ERROR: Unknown command to run ${command_to_run}"
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} Unknown command to run ${command_to_run}  ${COLOR_RESET}"
+        echo
         exit 1
         ;;
     esac
@@ -3059,9 +3058,9 @@ function breeze::run_breeze_command() {
         fi
         ;;
     *)
-        echo >&2
-        echo >&2 "ERROR: Unknown command to run ${command_to_run}"
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} Unknown command to run ${command_to_run}  ${COLOR_RESET}"
+        echo
         ;;
     esac
     set -u
diff --git a/scripts/ci/images/ci_verify_ci_image.sh b/scripts/ci/images/ci_verify_ci_image.sh
index e1f2b98..004eac0 100755
--- a/scripts/ci/images/ci_verify_ci_image.sh
+++ b/scripts/ci/images/ci_verify_ci_image.sh
@@ -28,12 +28,12 @@ function verify_ci_image_dependencies() {
     docker run --rm --entrypoint /bin/bash "${AIRFLOW_CI_IMAGE}" -c 'pip check'
     local res=$?
     if [[ ${res} != "0" ]]; then
-        echo -e " \e[31mERROR: ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.\e[0m"
+        echo  "${COLOR_RED_ERROR} ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.  ${COLOR_RESET}"
         echo
         build_images::inform_about_pip_check ""
     else
         echo
-        echo -e " \e[32mOK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.\e[0m"
+        echo  "${COLOR_GREEN_OK} The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.  ${COLOR_RESET}"
         echo
     fi
     set -e
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh
index 9718a48..274261b 100755
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/images/ci_verify_prod_image.sh
@@ -39,14 +39,15 @@ function verify_prod_image_has_airflow {
     echo
 
     if [[ "${COUNT_AIRFLOW_DIRS}" -lt "${EXPECTED_MIN_AIRFLOW_DIRS_COUNT}" ]]; then
-        >&2 echo
-        >&2 echo Number of airflow folders installed is less than ${EXPECTED_MIN_AIRFLOW_DIRS_COUNT}
-        >&2 echo This is unexpected. Please investigate, looking at the output above!
-        >&2 echo
+        echo
+        echo  "${COLOR_RED_ERROR} Number of airflow folders installed is less than ${EXPECTED_MIN_AIRFLOW_DIRS_COUNT}  ${COLOR_RESET}"
+        echo
+        echo "This is unexpected. Please investigate, looking at the output above!"
+        echo
         exit 1
     else
         echo
-        echo -e " \e[32mOK. Airflow is installed.\e[0m"
+        echo  "${COLOR_GREEN_OK} Airflow is installed.  ${COLOR_RESET}"
         echo
     fi
 }
@@ -62,14 +63,14 @@ function verify_prod_image_dependencies {
     docker run --rm --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" -c 'pip check'
     local res=$?
     if [[ ${res} != "0" ]]; then
-        echo -e " \e[31mERROR: ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.\e[0m"
+        echo  "${COLOR_RED_ERROR} ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.  ${COLOR_RESET}"
         echo
         build_images::inform_about_pip_check "--production "
         # TODO(potiuk) - enable the comment once https://github.com/apache/airflow/pull/12188 is merged
         # exit ${res}
     else
         echo
-        echo " \e[32mOK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.\e[0m"
+        echo " \e[32mOK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.  ${COLOR_RESET}"
         echo
     fi
     set -e
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 30e7a85..17889ce 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -180,14 +180,17 @@ function build_images::confirm_image_rebuild() {
         export FORCE_ANSWER_TO_QUESTIONS="no"
         echo 'export FORCE_ANSWER_TO_QUESTIONS="no"' >"${LAST_FORCE_ANSWER_FILE}"
     elif [[ ${RES} == "2" ]]; then
-        echo >&2
-        echo >&2 "ERROR: The ${THE_IMAGE_TYPE} needs to be rebuilt - it is outdated. "
-        echo >&2 "   Make sure you build the images bu running"
-        echo >&2
-        echo >&2 "      ./breeze --python ${PYTHON_MAJOR_MINOR_VERSION}" build-image
-        echo >&2
-        echo >&2 "   If you run it via pre-commit as individual hook, you can run 'pre-commit run build'."
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} The ${THE_IMAGE_TYPE} needs to be rebuilt - it is outdated.   ${COLOR_RESET}"
+        echo """
+
+   Make sure you build the images by running
+
+      ./breeze --python ${PYTHON_MAJOR_MINOR_VERSION} build-image
+
+   If you run it via pre-commit as individual hook, you can run 'pre-commit run build'.
+
+"""
         exit 1
     else
         # Force "yes" also to subsequent questions
@@ -535,9 +538,9 @@ function build_images::build_ci_image() {
             "--cache-from" "${AIRFLOW_CI_IMAGE}"
         )
     else
-        echo >&2
-        echo >&2 "Error - the ${DOCKER_CACHE} cache is unknown!"
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} The ${DOCKER_CACHE} cache is unknown!  ${COLOR_RESET}"
+        echo
         exit 1
     fi
     EXTRA_DOCKER_CI_BUILD_FLAGS=(
@@ -711,9 +714,10 @@ function build_images::build_prod_images() {
             "--cache-from" "${AIRFLOW_PROD_BUILD_IMAGE}"
         )
     else
-        echo >&2
-        echo >&2 "Error - thee ${DOCKER_CACHE} cache is unknown!"
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} The ${DOCKER_CACHE} cache is unknown  ${COLOR_RESET}"
+        echo
+        echo
         exit 1
     fi
     set +u
@@ -853,60 +857,64 @@ function build_images::determine_docker_cache_strategy() {
 
 # Useful information for people who stumble upon a pip check failure
 function build_images::inform_about_pip_check() {
-        >&2 echo """
+        echo """
+${COLOR_BLUE}***** Beginning of the instructions ****${COLOR_RESET}
 
 The image did not pass 'pip check' verification. This means that there are some conflicting dependencies
-in the image. Usually it means that some setup.py or setup.cfg limits need to be adjusted to fix it.
+in the image.
+
+It can mean one of two things:
+
+1) The master is currently broken (other PRs will fail with the same error)
+2) You changed some dependencies in setup.py or setup.cfg and they are conflicting.
 
-Usually it happens when one of the dependencies gets upgraded and it has more strict requirements
-than the other dependencies and they are conflicting.
 
-In case you did not update setup.py or any of your dependencies, this error might happen in case
-someone accidentally merges conflicting dependencies in master. This
-should not happen as we are running 'pip check' as dependency before we upgrade the constrained
-dependencies, but we could miss some edge cases (thank you for your patience). Please let committer now
-and apologies for the troubles. You do not have to do anything in this case. You might be asked to
-rebase to the latest master after the problem is fixed.
+In case 1) - apologies for the trouble. Please let committers know and they will fix it. You might
+be asked to rebase to the latest master after the problem is fixed.
 
-In case you actually updated setup.py, there are some steps you can take to address that:
+In case 2) - Follow the steps below:
 
-* first of all ask the committer to set 'upgrade to newer dependencies' and 'full tests needed' labels
-  for your PR. This will turn your PR in mode where all the dependencies are upgraded to latest matching
-  dependencies and the checks will run for all python versions
+* consult the committers if you are unsure what to do. Just comment in the PR that you need help,
+  but try to follow these instructions first!
 
-* run locally the image that is failing with Breeze - this will make it easy to manually try to update
-  the setup.py and test the consequences of changing constraints. You can do it by checking out your PR
-  and running this command:
+* ask the committer to set 'upgrade to newer dependencies'. All dependencies in your PR will be updated
+  to the latest 'good' versions and you will be able to check that they are not conflicting.
+
+* run the failing image locally with Breeze:
 
     ./breeze ${1}--github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} --python ${PYTHON_MAJOR_MINOR_VERSION}
 
-* your setup.py and setup.cfg will be mounted to the container and you will be able to iterate with
+* your setup.py and setup.cfg will be mounted to the container. You will be able to iterate with
   different setup.py versions.
 
-* run 'pipdeptree' to figure out where the dependency conflict comes from. Useful commands that can help you
-  to find out dependencies you have are:
+* in the container you can run 'pipdeptree' to figure out where the dependency conflict comes from.
+
+* Some useful commands that can help you to find out which dependencies you have:
+
      * 'pipdeptree | less' (you can then search through the dependencies with vim-like shortcuts)
+
      * 'pipdeptree > /files/pipdeptree.txt' - this will produce a pipdeptree.txt file in your source
+       'files' directory and you can open it in an editor of your choice,
+
      * 'pipdeptree | grep YOUR_DEPENDENCY' - to see all the requirements your dependency has as specified
        by other packages
 
-* figure out which dependency limits should be upgraded. First try to upgrade them in setup.py extras
+* figure out which dependency limits should be upgraded. Upgrade them in corresponding setup.py extras
   and run pip to upgrade your dependencies accordingly:
 
      pip install '.[all]' --upgrade --upgrade-strategy eager
 
-* run pip check to figure out if the dependencies have been fixed (it should let you know which dependencies
+* run pip check to figure out if the dependencies have been fixed. It should let you know which dependencies
   are conflicting or (hurray!) if there are no conflicts:
 
      pip check
 
 * in some, rare, cases, pip will not limit the requirement in case you specify it in extras, you might
-  need to add such requirement in 'install_requires' section of setup.cfg in order to have pip take it into
-  account. This will happen if higher version of your dependency is already installed in 'install_requires'
-  section. In such case update 'setup.cfg' and run pip install/pip check from the previous steps
+  need to add such a requirement to the 'install_requires' section of setup.cfg instead of the extras in setup.py.
 
 * iterate until all such dependency conflicts are fixed.
 
+${COLOR_BLUE}***** End of the instructions ****${COLOR_RESET}
+
 """
 }
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 5890834..9c3ab67 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -458,8 +458,28 @@ function initialization::initialize_build_image_variables() {
     REMOTE_IMAGE_BUILD_CACHE_HASH_FILE="${AIRFLOW_SOURCES}/manifests/remote-build-cache-hash"
 }
 
+function initialization::set_output_color_variables() {
+    COLOR_BLUE=$'\e[34m'
+    COLOR_GREEN=$'\e[32m'
+    COLOR_GREEN_OK=$'\e[32mOK.'
+    COLOR_RED=$'\e[31m'
+    COLOR_RED_ERROR=$'\e[31mERROR:'
+    COLOR_RESET=$'\e[0m'
+    COLOR_YELLOW=$'\e[33m'
+    COLOR_YELLOW_WARNING=$'\e[33mWARNING:'
+    export COLOR_BLUE
+    export COLOR_GREEN
+    export COLOR_GREEN_OK
+    export COLOR_RED
+    export COLOR_RED_ERROR
+    export COLOR_RESET
+    export COLOR_YELLOW
+    export COLOR_YELLOW_WARNING
+}
+
 # Common environment that is initialized by both Breeze and CI scripts
 function initialization::initialize_common_environment() {
+    initialization::set_output_color_variables
     initialization::create_directories
     initialization::initialize_base_variables
     initialization::initialize_branch_variables
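
These variables establish the convention used throughout the rest of this change:
messages embed a coloured prefix and close with ${COLOR_RESET} (a sketch of the pattern,
assuming the function above has been called):

    echo "${COLOR_RED_ERROR} Something went wrong  ${COLOR_RESET}"
    echo "${COLOR_YELLOW_WARNING} Proceeding anyway  ${COLOR_RESET}"
    echo "${COLOR_GREEN_OK} Everything is fine  ${COLOR_RESET}"
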
diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh
index 3a170cb..ab48007 100644
--- a/scripts/ci/libraries/_kind.sh
+++ b/scripts/ci/libraries/_kind.sh
@@ -121,9 +121,9 @@ function kind::perform_kind_cluster_operation() {
     ALLOWED_KIND_OPERATIONS="[ start restart stop deploy test shell recreate k9s]"
     set +u
     if [[ -z "${1=}" ]]; then
-        echo >&2
-        echo >&2 "Operation must be provided as first parameter. One of: ${ALLOWED_KIND_OPERATIONS}"
-        echo >&2
+        echo
+        echo  "${COLOR_RED_ERROR} Operation must be provided as first parameter. One of: ${ALLOWED_KIND_OPERATIONS}  ${COLOR_RESET}"
+        echo
         exit 1
     fi
     set -u
@@ -202,9 +202,9 @@ function kind::perform_kind_cluster_operation() {
                 -e EDITOR -e K9S_EDITOR \
                 -v "${KUBECONFIG}:/root/.kube/config" quay.io/derailed/k9s
         else
-            echo >&2
-            echo >&2 "Wrong cluster operation: ${OPERATION}. Should be one of: ${ALLOWED_KIND_OPERATIONS}"
-            echo >&2
+            echo
+            echo  "${COLOR_RED_ERROR} Wrong cluster operation: ${OPERATION}. Should be one of: ${ALLOWED_KIND_OPERATIONS}  ${COLOR_RESET}"
+            echo
             exit 1
         fi
     else
@@ -220,14 +220,14 @@ function kind::perform_kind_cluster_operation() {
             echo
             kind::create_cluster
         elif [[ ${OPERATION} == "stop" || ${OPERATION} == "deploy" || ${OPERATION} == "test" || ${OPERATION} == "shell" ]]; then
-            echo >&2
-            echo >&2 "Cluster ${KIND_CLUSTER_NAME} does not exist. It should exist for ${OPERATION} operation"
-            echo >&2
+            echo
+            echo  "${COLOR_RED_ERROR} Cluster ${KIND_CLUSTER_NAME} does not exist. It should exist for ${OPERATION} operation  ${COLOR_RESET}"
+            echo
             exit 1
         else
-            echo >&2
-            echo >&2 "Wrong cluster operation: ${OPERATION}. Should be one of ${ALLOWED_KIND_OPERATIONS}"
-            echo >&2
+            echo
+            echo  "${COLOR_RED_ERROR} Wrong cluster operation: ${OPERATION}. Should be one of ${ALLOWED_KIND_OPERATIONS}  ${COLOR_RESET}"
+            echo
             exit 1
         fi
     fi
@@ -293,9 +293,9 @@ function kind::wait_for_webserver_healthy() {
         sleep "${SLEEP_TIME_FOR_HEALTH_CHECK}"
         num_tries=$((num_tries + 1))
         if [[ ${num_tries} == "${MAX_NUM_TRIES_FOR_HEALTH_CHECK}" ]]; then
-            >&2 echo
-            >&2 echo "Timeout while waiting for the webserver health check"
-            >&2 echo
+            echo
+            echo  "${COLOR_RED_ERROR} Timeout while waiting for the webserver health check  ${COLOR_RESET}"
+            echo
         fi
     done
     echo
diff --git a/scripts/ci/libraries/_parameters.sh b/scripts/ci/libraries/_parameters.sh
index d655853..566585e 100644
--- a/scripts/ci/libraries/_parameters.sh
+++ b/scripts/ci/libraries/_parameters.sh
@@ -40,15 +40,16 @@ function parameters::check_allowed_param() {
     _ALLOWED_VALUES=" ${!_ALLOWED_VALUES_ENV_NAME//$'\n'/ } "
     _VALUE=${!_VARIABLE_NAME}
     if [[ ${_ALLOWED_VALUES:=} != *" ${_VALUE} "* ]]; then
-        echo >&2
-        echo >&2 "ERROR:  Allowed ${_VARIABLE_DESCRIPTIVE_NAME}: [${_ALLOWED_VALUES}]. Passed: '${!_VARIABLE_NAME}'."
-        echo >&2
-        echo >&2 "Switch to supported value with ${_FLAG} flag."
-
+        echo
+        echo  "${COLOR_RED_ERROR} Allowed ${_VARIABLE_DESCRIPTIVE_NAME}: [${_ALLOWED_VALUES}]. Passed: '${!_VARIABLE_NAME}'  ${COLOR_RESET}"
+        echo
+        echo "Switch to supported value with ${_FLAG} flag."
+        echo
         if [[ -n ${!_VARIABLE_NAME} && -f "${BUILD_CACHE_DIR}/.${_VARIABLE_NAME}" && ${!_VARIABLE_NAME} == $(cat "${BUILD_CACHE_DIR}/.${_VARIABLE_NAME}") ]]; then
-            echo >&2
-            echo >&2 "Removing ${BUILD_CACHE_DIR}/.${_VARIABLE_NAME}. Next time you run it, it should be OK."
-            echo >&2
+            echo
+            echo  "${COLOR_YELLOW_WARNING}: Removing ${BUILD_CACHE_DIR}/.${_VARIABLE_NAME}. Next time you run it, it should be OK.  ${COLOR_RESET}"
+            echo
+            echo
             rm -f "${BUILD_CACHE_DIR}/.${_VARIABLE_NAME}"
         fi
         exit 1
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index 216e025..fff08ad 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -29,17 +29,17 @@ function push_pull_remove_images::push_image_with_retries() {
         local res=$?
         set -e
         if [[ ${res} != "0" ]]; then
-            >&2 echo
-            >&2 echo "Error ${res} when pushing image on ${try_num} try"
-            >&2 echo
+            echo
+            echo  "${COLOR_YELLOW_WARNING}: Error ${res} when pushing image on ${try_num} try  ${COLOR_RESET}"
+            echo
             continue
         else
             return 0
         fi
     done
-    >&2 echo
-    >&2 echo "Error ${res} when pushing image on ${try_num} try. Giving up!"
-    >&2 echo
+    echo
+    echo  "${COLOR_RED_ERROR} Error ${res} when pushing image on ${try_num} try. Giving up!  ${COLOR_RESET}"
+    echo
     return 1
 }
 
@@ -64,17 +64,19 @@ function push_pull_remove_images::pull_image_if_not_present_or_forced() {
         docker pull "${IMAGE_TO_PULL}"
         EXIT_VALUE="$?"
         if [[ ${EXIT_VALUE} != "0" && ${FAIL_ON_GITHUB_DOCKER_PULL_ERROR} == "true" ]]; then
-            >&2 echo
-            >&2 echo "ERROR! Exiting on docker pull error"
-            >&2 echo
-            >&2 echo "If you have authorisation problems, you might want to run:"
-            >&2 echo
-            >&2 echo "docker login ${IMAGE_TO_PULL%%\/*}"
-            >&2 echo
-            >&2 echo "You need to use generate token as the password, not your personal password."
-            >&2 echo "You can generete one at https://github.com/settings/tokens"
-            >&2 echo "Make sure to choose 'read:packages' scope".
-            >&2 echo
+            echo
+            echo """
+${COLOR_RED_ERROR} Exiting on docker pull error
+
+If you have authorisation problems, you might want to run:
+
+docker login ${IMAGE_TO_PULL%%\/*}
+
+You need to use a generated token as the password, not your personal password.
+You can generate one at https://github.com/settings/tokens
+Make sure to choose 'read:packages' scope.
+${COLOR_RESET}
+"""
             exit ${EXIT_VALUE}
         fi
         echo
@@ -282,7 +284,7 @@ function push_pull_remove_images::wait_for_github_registry_image() {
         digest=$(jq '.config.digest' < "${OUTPUT_LOG}")
         echo -n "."
         if [[ ${digest} != "null" ]]; then
-            echo -e " \e[32mOK.\e[0m"
+            echo  "${COLOR_GREEN_OK}  ${COLOR_RESET}"
             break
         fi
         sleep 10
diff --git a/scripts/ci/libraries/_repeats.sh b/scripts/ci/libraries/_repeats.sh
index 96e4582..b245c5b 100644
--- a/scripts/ci/libraries/_repeats.sh
+++ b/scripts/ci/libraries/_repeats.sh
@@ -34,12 +34,13 @@ function repeats::run_with_retry() {
         if [[ ${res} == "0" ]]; then
             return 0
         fi
-        >&2 echo
-        >&2 echo "Unsuccessful attempt no. ${n}. Result: ${res}"
-        >&2 echo
+        echo
+        echo  "${COLOR_YELLOW_WARNING}: Unsuccessful attempt no. ${n}. Result: ${res}  ${COLOR_RESET}"
+        echo
+        echo
     done
-    >&2 echo
-    >&2 echo "Giving up after ${num_repeats} attempts!"
-    >&2 echo
+    echo
+    echo  "${COLOR_RED_ERROR} Giving up after ${num_repeats} attempts!  ${COLOR_RESET}"
+    echo
     return ${res}
 }
diff --git a/scripts/ci/libraries/_sanity_checks.sh b/scripts/ci/libraries/_sanity_checks.sh
index 9347bf4..3138776 100644
--- a/scripts/ci/libraries/_sanity_checks.sh
+++ b/scripts/ci/libraries/_sanity_checks.sh
@@ -77,32 +77,36 @@ function sanity_checks::check_if_coreutils_installed() {
     if [[ ${GETOPT_RETVAL} != 4 || "${STAT_PRESENT}" != "0" || "${MD5SUM_PRESENT}" != "0" ]]; then
         verbosity::print_info
         if [[ $(uname -s) == 'Darwin' ]] ; then
-            echo >&2 "You are running ${CMDNAME} in OSX environment"
-            echo >&2 "And you need to install gnu commands"
-            echo >&2
-            echo >&2 "Run 'brew install gnu-getopt coreutils'"
-            echo >&2
-            echo >&2 "Then link the gnu-getopt to become default as suggested by brew."
-            echo >&2
-            echo >&2 "If you use bash, you should run these commands:"
-            echo >&2
-            echo >&2 "echo 'export PATH=\"/usr/local/opt/gnu-getopt/bin:\$PATH\"' >> ~/.bash_profile"
-            echo >&2 ". ~/.bash_profile"
-            echo >&2
-            echo >&2 "If you use zsh, you should run these commands:"
-            echo >&2
-            echo >&2 "echo 'export PATH=\"/usr/local/opt/gnu-getopt/bin:\$PATH\"' >> ~/.zprofile"
-            echo >&2 ". ~/.zprofile"
-            echo >&2
-            echo >&2 "Either source the profile file as shown above, or re-login afterwards."
-            echo >&2
-            echo >&2 "After that, your PATH variable should start with \"/usr/local/opt/gnu-getopt/bin\""
-            echo >&2 "Your current path is ${PATH}"
-            echo >&2
+            echo """
+${COLOR_RED_ERROR} You are running ${CMDNAME} in an OSX environment and you need to install gnu commands
+
+Run 'brew install gnu-getopt coreutils'
+
+Then link the gnu-getopt to become default as suggested by brew.
+
+If you use bash, you should run these commands:
+
+echo 'export PATH=\"/usr/local/opt/gnu-getopt/bin:\$PATH\"' >> ~/.bash_profile
+. ~/.bash_profile
+
+If you use zsh, you should run these commands:
+
+echo 'export PATH=\"/usr/local/opt/gnu-getopt/bin:\$PATH\"' >> ~/.zprofile
+. ~/.zprofile
+
+Either source the profile file as shown above, or re-login afterwards.
+
+After that, your PATH variable should start with \"/usr/local/opt/gnu-getopt/bin\"
+Your current path is ${PATH}
+${COLOR_RESET}
+"""
         else
-            echo >&2 "You do not have necessary tools in your path (getopt, stat, md5sum)."
-            echo >&2 "Please install latest/GNU version of getopt and coreutils."
-            echo >&2 "This can usually be done with 'apt install util-linux coreutils'"
+            echo """
+${COLOR_RED_ERROR} You do not have necessary tools in your path (getopt, stat, md5sum).
+Please install latest/GNU version of getopt and coreutils.
+This can usually be done with 'apt install util-linux coreutils'
+${COLOR_RESET}
+"""
         fi
         verbosity::print_info
         exit 1
@@ -117,12 +121,14 @@ function sanity_checks::assert_not_in_container() {
         return
     fi
     if [[ -f /.dockerenv ]]; then
-        echo >&2
-        echo >&2 "You are inside the Airflow docker container!"
-        echo >&2 "You should only run this script from the host."
-        echo >&2 "Learn more about how we develop and test airflow in:"
-        echo >&2 "https://github.com/apache/airflow/blob/master/TESTING.rst"
-        echo >&2
+        echo """
+${COLOR_RED_ERROR} You are inside the Airflow docker container!  ${COLOR_RESET}
+
+You should only run this script from the host.
+Learn more about how we develop and test airflow at:
+https://github.com/apache/airflow/blob/master/TESTING.rst
+
+"""
         exit 1
     fi
 }
diff --git a/scripts/ci/pre_commit/pre_commit_check_pre_commits.sh b/scripts/ci/pre_commit/pre_commit_check_pre_commits.sh
index 1b37ca5..59165b2 100755
--- a/scripts/ci/pre_commit/pre_commit_check_pre_commits.sh
+++ b/scripts/ci/pre_commit/pre_commit_check_pre_commits.sh
@@ -38,29 +38,35 @@ for pre_commit in ${all_pre_commits}
 do
     if ! grep -q "${pre_commit}" "${STATIC_CODE_CHECKS_FILE}"; then
         error="true"
-        >&2 echo
-        >&2 echo "ERROR: Pre-commit ${pre_commit} is not described in ${STATIC_CODE_CHECKS_FILE}"
-        >&2 echo
-        >&2 echo "FIX: Please add ${pre_commit} in the table in the 'Pre-commit hooks' chapter in ${STATIC_CODE_CHECKS_FILE}"
-        >&2 echo
+        echo
+        echo """
+${COLOR_RED_ERROR} Pre-commit ${pre_commit} is not described in ${STATIC_CODE_CHECKS_FILE}
+
+FIX: Please add ${pre_commit} in the table in the 'Pre-commit hooks' chapter in ${STATIC_CODE_CHECKS_FILE}
+${COLOR_RESET}
+"""
+        echo
     fi
+    # shellcheck disable=SC2154
     if [[ ! ${_breeze_allowed_static_checks} == *${pre_commit}* ]]; then
         error="true"
-        >&2 echo
-        >&2 echo "ERROR: Pre-commit ${pre_commit} is missing in _breeze_allowed_static_checks variable in breeze-complete"
-        >&2 echo
-        >&2 echo "FIX: Please add ${pre_commit} in the table in the '_breeze_allowed_static_checks' constant in ${AIRFLOW_SOURCES}/breeze-complete"
-        >&2 echo
+        echo """
+${COLOR_RED_ERROR} Pre-commit ${pre_commit} is missing in _breeze_allowed_static_checks variable in breeze-complete
+
+FIX: Please add ${pre_commit} in the table in the '_breeze_allowed_static_checks' constant in ${AIRFLOW_SOURCES}/breeze-complete
+${COLOR_RESET}
+"""
     fi
 done
 
 if [[ ${error} == "true" ]]; then
-  >&2 echo
-  >&2 echo "Some pre-commits are not synchronized! Please fix the errors above!"
-  >&2 echo
-  exit 1
+    echo
+    echo  "${COLOR_RED_ERROR} Some pre-commits are not synchronized! Please fix the errors above!  ${COLOR_RESET}"
+    echo
+    exit 1
 else
-  echo
-  echo "All pre-commits are synchronized!"
-  echo
+    echo
+    echo "${COLOR_GREEN_OK} All pre-commits are synchronized!  ${COLOR_RESET}"
+    echo
 fi
diff --git a/scripts/ci/pre_commit/pre_commit_mermaid.sh b/scripts/ci/pre_commit/pre_commit_mermaid.sh
index fc52c91..60db93e 100755
--- a/scripts/ci/pre_commit/pre_commit_mermaid.sh
+++ b/scripts/ci/pre_commit/pre_commit_mermaid.sh
@@ -86,9 +86,9 @@ EOF
             echo "Please add both files and commit them to repository"
             echo
         else
-            1>&2 echo
-            1>&2 echo "ERROR: Could not generate ${basename_file}.png"
-            1>&2 echo
+            echo
+            echo "\e[31mERROR: Could not generate ${basename_file}.png  ${COLOR_RESET}"
+            echo
             exit 1
         fi
     else
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index 8696d56..f8f43cc 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -145,10 +145,10 @@ function get_changed_files() {
     CHANGED_FILES=$(git diff-tree --no-commit-id --name-only \
         -r "${INCOMING_COMMIT_SHA}^" "${INCOMING_COMMIT_SHA}" || true)
     if [[ ${CHANGED_FILES} == "" ]]; then
-        >&2 echo
-        >&2 echo Warning! Could not find any changed files
-        >&2 echo Assuming that we should run all tests in this case
-        >&2 echo
+        echo
+        echo  "${COLOR_YELLOW_WARNING}: Could not find any changed files  ${COLOR_RESET}"
+        echo Assuming that we should run all tests in this case
+        echo
         set_outputs_run_everything_and_exit
     fi
     echo
diff --git a/scripts/ci/static_checks/check_license.sh b/scripts/ci/static_checks/check_license.sh
index 642cfc9..a185a5a 100755
--- a/scripts/ci/static_checks/check_license.sh
+++ b/scripts/ci/static_checks/check_license.sh
@@ -37,7 +37,9 @@ function run_check_license() {
             apache/airflow:apache-rat-2020.07.10-0.13 \
             --exclude-file /opt/airflow/.rat-excludes \
             --d /opt/airflow | tee "${AIRFLOW_SOURCES}/logs/rat-results.txt" ; then
-        echo >&2 "RAT exited abnormally"
+        echo
+        echo  "${COLOR_RED_ERROR} RAT exited abnormally  ${COLOR_RESET}"
+        echo
         exit 1
     fi
 
@@ -46,13 +48,14 @@ function run_check_license() {
     errors=$(grep -F "??" "${AIRFLOW_SOURCES}/logs/rat-results.txt")
     set -e
     if test ! -z "${errors}"; then
-        echo >&2
-        echo >&2 "Could not find Apache license headers in the following files:"
-        echo >&2 "${errors}"
+        echo
+        echo  "${COLOR_RED_ERROR} Could not find Apache license headers in the following files:  ${COLOR_RESET}"
+        echo
+        echo "${errors}"
         exit 1
-        echo >&2
     else
-        echo "RAT checks passed."
+        echo
+        echo "${COLOR_GREEN_OK} RAT checks passed.  ${COLOR_RESET}"
         echo
     fi
 }
diff --git a/scripts/in_container/_in_container_script_init.sh b/scripts/in_container/_in_container_script_init.sh
index 96d99e5..562de97 100755
--- a/scripts/in_container/_in_container_script_init.sh
+++ b/scripts/in_container/_in_container_script_init.sh
@@ -24,10 +24,12 @@ IN_CONTAINER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 # shellcheck source=scripts/in_container/_in_container_utils.sh
 . "${IN_CONTAINER_DIR}/_in_container_utils.sh"
 
+in_container_set_colors
+
 in_container_basic_sanity_check
 
 in_container_script_start
 
-add_trap "in_container_script_end" EXIT HUP INT TERM
-add_trap "in_container_clear_tmp" EXIT HUP INT TERM
 add_trap "in_container_fix_ownership" EXIT HUP INT TERM
+add_trap "in_container_clear_tmp" EXIT HUP INT TERM
+add_trap "in_container_script_end" EXIT HUP INT TERM
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 6e41f38..a5a827a 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -40,17 +40,19 @@ function add_trap() {
 function assert_in_container() {
     export VERBOSE=${VERBOSE:="false"}
     if [[ ! -f /.dockerenv ]]; then
-        echo >&2
-        echo >&2 "You are not inside the Airflow docker container!"
-        echo >&2 "You should only run this script in the Airflow docker container as it may override your files."
-        echo >&2 "Learn more about how we develop and test airflow in:"
-        echo >&2 "https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst"
-        echo >&2
+        echo
+        echo "${COLOR_RED_ERROR} You are not inside the Airflow docker container!  ${COLOR_RESET}"
+        echo
+        echo "You should only run this script in the Airflow docker container as it may override your files."
+        echo "Learn more about how we develop and test airflow in:"
+        echo "https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst"
+        echo
         exit 1
     fi
 }
 
 function in_container_script_start() {
+    OUT_FILE_PRINTED_ON_ERROR=$(mktemp)
     if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
         set -x
     fi
@@ -61,17 +63,23 @@ function in_container_script_end() {
     EXIT_CODE=$?
     if [[ ${EXIT_CODE} != 0 ]]; then
         if [[ "${PRINT_INFO_FROM_SCRIPTS=="true"}" == "true" ]] ;then
-            if [[ -n ${OUT_FILE_PRINTED_ON_ERROR=} ]]; then
-                echo "  ERROR ENCOUNTERED!"
+            if [[ -f ${OUT_FILE_PRINTED_ON_ERROR} ]]; then
+                echo "###########################################################################################"
                 echo
-                echo "  Output:"
+                echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container (See above for error message). Below is the output of the last action! ${COLOR_RESET}"
                 echo
+                echo "${COLOR_BLUE}***  BEGINNING OF THE LAST COMMAND OUTPUT *** ${COLOR_RESET}"
                 cat "${OUT_FILE_PRINTED_ON_ERROR}"
+                echo "${COLOR_BLUE}***  END OF THE LAST COMMAND OUTPUT ***  ${COLOR_RESET}"
+                echo
+                echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container. The actual error might be above the output!  ${COLOR_RESET}"
+                echo
                 echo "###########################################################################################"
+            else
+                echo "########################################################################################################################"
+                echo "${COLOR_BLUE} [IN CONTAINER]   EXITING ${0} WITH EXIT CODE ${EXIT_CODE}  ${COLOR_RESET}"
+                echo "########################################################################################################################"
             fi
-            echo "###########################################################################################"
-            echo "  [IN CONTAINER]   EXITING ${0} WITH STATUS CODE ${EXIT_CODE}"
-            echo "###########################################################################################"
         fi
     fi
 
@@ -241,5 +249,24 @@ function install_released_airflow_version() {
 }
 
 
+function in_container_set_colors() {
+    COLOR_BLUE=$'\e[34m'
+    COLOR_GREEN=$'\e[32m'
+    COLOR_GREEN_OK=$'\e[32mOK.'
+    COLOR_RED=$'\e[31m'
+    COLOR_RED_ERROR=$'\e[31mERROR:'
+    COLOR_RESET=$'\e[0m'
+    COLOR_YELLOW=$'\e[33m'
+    COLOR_YELLOW_WARNING=$'\e[33mWARNING:'
+    export COLOR_BLUE
+    export COLOR_GREEN
+    export COLOR_GREEN_OK
+    export COLOR_RED
+    export COLOR_RED_ERROR
+    export COLOR_RESET
+    export COLOR_YELLOW
+    export COLOR_YELLOW_WARNING
+}
+
 export CI=${CI:="false"}
 export GITHUB_ACTIONS=${GITHUB_ACTIONS:="false"}
diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh
index 7052628..2282304 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -35,15 +35,14 @@ function check_service() {
         RES=$?
         set -e
         if [[ ${RES} == 0 ]]; then
-            echo -e " \e[32mOK.\e[0m"
+            echo  "${COLOR_GREEN_OK}  ${COLOR_RESET}"
             break
         else
             echo -n "."
             MAX_CHECK=$((MAX_CHECK - 1))
         fi
         if [[ ${MAX_CHECK} == 0 ]]; then
-            echo -e " \e[31mERROR!\e[0m"
-            echo "Maximum number of retries while checking service. Exiting"
+            echo "${COLOR_RED_ERROR} Maximum number of retries while checking service. Exiting ${COLOR_RESET}"
             break
         else
             sleep 1
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index a645922..7ef1e7a 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -252,9 +252,9 @@ else
             ${TEST_TYPE} == "Integration" ]]; then
         SELECTED_TESTS=("${ALL_TESTS[@]}")
     else
-        >&2 echo
-        >&2 echo "Wrong test type ${TEST_TYPE}"
-        >&2 echo
+        echo
+        echo  "${COLOR_RED_ERROR} Wrong test type ${TEST_TYPE}  ${COLOR_RESET}"
+        echo
         exit 1
     fi
 
diff --git a/scripts/in_container/run_ci_tests.sh b/scripts/in_container/run_ci_tests.sh
index 7f2be4c..bebcce8 100755
--- a/scripts/in_container/run_ci_tests.sh
+++ b/scripts/in_container/run_ci_tests.sh
@@ -38,46 +38,53 @@ elif [[ "${RES}" != "0" ]]; then
     elif [[ ${BACKEND} == "mysql" ]]; then
         EXTRA_ARGS="--mysql-version ${MYSQL_VERSION} "
     fi
-
-    >&2 echo "***********************************************************************************************"
-    >&2 echo "*"
-    >&2 echo "* ERROR! Some tests failed, unfortunately. Those might be transient errors,"
-    >&2 echo "*        but usually you have to fix something."
-    >&2 echo "*        See the above log for details."
-    >&2 echo "*"
-    >&2 echo "***********************************************************************************************"
-    >&2 echo "*  You can easily reproduce the failed tests on your dev machine/"
-    >&2 echo "*"
-    >&2 echo "*   When you have the source branch checked out locally:"
-    >&2 echo "*"
-    >&2 echo "*     Run all tests:"
-    >&2 echo "*"
-    >&2 echo "*       ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE}  tests"
-    >&2 echo "*"
-    >&2 echo "*     Enter docker shell:"
-    >&2 echo "*"
-    >&2 echo "*       ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE}  shell"
-    >&2 echo "*"
+    echo """
+${COLOR_RED_ERROR}
+***********************************************************************************************
+*
+* ERROR! Some tests failed, unfortunately. Those might be transient errors,
+*        but usually you have to fix something.
+*        See the above log for details.
+*
+***********************************************************************************************
+*  You can easily reproduce the failed tests on your dev machine:
+*
+*   When you have the source branch checked out locally:
+*
+*     Run all tests:
+*
+*       ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE}  tests
+*
+*     Enter docker shell:
+*
+*       ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE}  shell
+*
+"""
     if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG=} != "" ]]; then
-        >&2 echo "*   When you do not have sources:"
-        >&2 echo "*"
-        >&2 echo "*     Run all tests:"
-        >&2 echo "*"
-        >&2 echo "*      ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} tests"
-        >&2 echo "*"
-        >&2 echo "*     Enter docker shell:"
-        >&2 echo "*"
-        >&2 echo "*      ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} shell"
-        >&2 echo "*"
+        echo """
+*   When you do not have sources:
+*
+*     Run all tests:
+*
+*      ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} tests
+*
+*     Enter docker shell:
+*
+*      ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} shell
+*
+"""
     fi
-    >&2 echo "*"
-    >&2 echo "*   NOTE! Once you are in the docker shell, you can run failed test with:"
-    >&2 echo "*"
-    >&2 echo "*            pytest [TEST_NAME]"
-    >&2 echo "*"
-    >&2 echo "*   You can copy the test name from the output above"
-    >&2 echo "*"
-    >&2 echo "***********************************************************************************************"
+    echo """
+*
+*   NOTE! Once you are in the docker shell, you can run the failed tests with:
+*
+*            pytest [TEST_NAME]
+*
+*   You can copy the test name from the output above
+*
+***********************************************************************************************
+${COLOR_RESET}
+"""
 fi
 
 MAIN_GITHUB_REPOSITORY="apache/airflow"
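
Note: bash has no dedicated triple-quote syntax - in the messages above, """ parses as an
empty string "" followed by an ordinary opening quote, so each echo """...""" call prints a
single multi-line double-quoted string with the variables expanded. A minimal sketch of the
pattern (the color values here are illustrative assumptions, not the ones the CI scripts define):

```shell script
#!/usr/bin/env bash
# Illustrative color values - the real ones are set up by the CI scripts.
COLOR_RED_ERROR=$'\e[31mERROR!\e[0m'
COLOR_RESET=$'\e[0m'
BACKEND="postgres"
# """ is just "" plus an opening quote: one multi-line string is printed.
echo """
${COLOR_RED_ERROR}
*  Reproduce the failure locally with backend: ${BACKEND}
${COLOR_RESET}
"""
```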
diff --git a/scripts/in_container/run_cli_tool.sh b/scripts/in_container/run_cli_tool.sh
index 706098e..6a4c67c 100755
--- a/scripts/in_container/run_cli_tool.sh
+++ b/scripts/in_container/run_cli_tool.sh
@@ -19,19 +19,27 @@
 set -euo pipefail
 
 if [ -z "${AIRFLOW_CI_IMAGE=}" ]; then
-    >&2 echo "Missing environment variable AIRFLOW_CI_IMAGE"
+    echo
+    echo  "${COLOR_RED_ERROR} Missing environment variable AIRFLOW_CI_IMAGE  ${COLOR_RESET}"
+    echo
     exit 1
 fi
 if [ -z "${HOST_AIRFLOW_SOURCES=}" ]; then
-    >&2 echo "Missing environment variable HOST_AIRFLOW_SOURCES"
+    echo
+    echo  "${COLOR_RED_ERROR} Missing environment variable HOST_AIRFLOW_SOURCES  ${COLOR_RESET}"
+    echo
     exit 1
 fi
 if [ -z "${HOST_USER_ID=}" ]; then
-    >&2 echo "Missing environment variable HOST_USER_ID"
+    echo
+    echo  "${COLOR_RED_ERROR} Missing environment variable HOST_USER_ID  ${COLOR_RESET}"
+    echo
     exit 1
 fi
 if [ -z "${HOST_GROUP_ID=}" ]; then
-    >&2 echo "Missing environment variable HOST_GROUP_ID"
+    echo
+    echo  "${COLOR_RED_ERROR} Missing environment variable HOST_GROUP_ID   ${COLOR_RESET}"
+    echo
     exit 1
 fi
 
@@ -122,7 +130,9 @@ case "${TOOL_NAME}" in
         COMMAND=("/usr/local/openjdk-8/bin/java" "${@}")
         ;;
     * )
-        >&2 echo "Unsupported tool name: ${TOOL_NAME}"
+        echo
+        echo  "${COLOR_RED_ERROR} Unsupported tool name: ${TOOL_NAME}  ${COLOR_RESET}"
+        echo
         exit 1
         ;;
 esac
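
Note: the case statement above only selects the tool; the script then runs the selected
COMMAND array. A minimal sketch of that dispatch-and-execute pattern (the tool name and
command below are illustrative, not the script's actual entries):

```shell script
#!/usr/bin/env bash
set -euo pipefail
TOOL_NAME="date"   # hypothetical tool, for illustration only
case "${TOOL_NAME}" in
    date )
        COMMAND=("date" "-u")
        ;;
    * )
        echo "Unsupported tool name: ${TOOL_NAME}" >&2
        exit 1
        ;;
esac
# Run the selected command, preserving each argument as a separate word.
"${COMMAND[@]}"
```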
diff --git a/scripts/in_container/run_system_tests.sh b/scripts/in_container/run_system_tests.sh
index e8c11c4..2b1181c 100755
--- a/scripts/in_container/run_system_tests.sh
+++ b/scripts/in_container/run_system_tests.sh
@@ -25,6 +25,8 @@ IN_CONTAINER_DIR=$(cd "$(dirname "$0")" || exit 1; pwd)
 # shellcheck source=scripts/in_container/_in_container_utils.sh
 . "${IN_CONTAINER_DIR}/_in_container_utils.sh"
 
+in_container_set_colors
+
 in_container_basic_sanity_check
 
 in_container_script_start
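
Note: in_container_set_colors is defined in _in_container_utils.sh and is not shown in this
patch; a minimal sketch of what such a helper might define (the variable values are
assumptions matching the COLOR_RED_ERROR/COLOR_RESET names used above):

```shell script
# Sketch only - the real function lives in scripts/in_container/_in_container_utils.sh.
function in_container_set_colors() {
    COLOR_RED_ERROR=$'\e[31mERROR:\e[0m'
    COLOR_RESET=$'\e[0m'
    export COLOR_RED_ERROR COLOR_RESET
}
```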
diff --git a/tests/bats/breeze/test_breeze_params.bats b/tests/bats/breeze/test_breeze_params.bats
index c103d6d..92a2686 100644
--- a/tests/bats/breeze/test_breeze_params.bats
+++ b/tests/bats/breeze/test_breeze_params.bats
@@ -31,10 +31,7 @@ teardown() {
 @test "Test missing value for a parameter" {
   export _breeze_allowed_test_params="a b c"
   run parameters::check_and_save_allowed_param "TEST_PARAM"  "Test Param" "--message"
-  assert_output "
-ERROR:  Allowed Test Param: [ a b c ]. Passed: ''.
-
-Switch to supported value with --message flag."
+  assert_output --regexp "Allowed Test Param: \[ a b c \]\. Passed: ''"
   assert_failure
 }
 
@@ -43,10 +40,7 @@ Switch to supported value with --message flag."
   export TEST_PARAM=x
   echo "a" > "${AIRFLOW_SOURCES}/.build/.TEST_PARAM"
   run parameters::check_and_save_allowed_param "TEST_PARAM"  "Test Param" "--message"
-  assert_output "
-ERROR:  Allowed Test Param: [ a b c ]. Passed: 'x'.
-
-Switch to supported value with --message flag."
+  assert_output --regexp "Allowed Test Param: \[ a b c \]\. Passed: 'x"
   assert_exist "${AIRFLOW_SOURCES}/.build/.TEST_PARAM"
   assert_file_contains "${AIRFLOW_SOURCES}/.build/.TEST_PARAM" "^a$"
   assert_failure 1
@@ -57,12 +51,7 @@ Switch to supported value with --message flag."
   export TEST_PARAM=x
   echo "x" > "${AIRFLOW_SOURCES}/.build/.TEST_PARAM"
   run parameters::check_and_save_allowed_param "TEST_PARAM"  "Test Param" "--message"
-  assert_output "
-ERROR:  Allowed Test Param: [ a b c ]. Passed: 'x'.
-
-Switch to supported value with --message flag.
-
-Removing ${AIRFLOW_SOURCES}/.build/.TEST_PARAM. Next time you run it, it should be OK."
+  assert_output --regexp "Allowed Test Param: \[ a b c \]\. Passed: 'x'"
   assert_not_exist "${AIRFLOW_SOURCES}/.build/.TEST_PARAM"
   assert_failure 1
 }
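
Note: assert_output --regexp (from the bats-assert library) matches the captured output
against an extended regular expression rather than requiring an exact multi-line match, so
the assertions stay stable when colors or surrounding blank lines change. An illustrative
standalone test (assuming bats-assert is loaded, as in the files above):

```shell script
@test "regexp assertion example" {
  run echo "ERROR:  Allowed Test Param: [ a b c ]. Passed: 'x'."
  assert_output --regexp "Allowed Test Param: \[ a b c \]\. Passed: 'x'"
}
```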
diff --git a/tests/bats/in_container/test_in_container.bats b/tests/bats/in_container/test_in_container.bats
index fe2e0c3..8386ad2 100644
--- a/tests/bats/in_container/test_in_container.bats
+++ b/tests/bats/in_container/test_in_container.bats
@@ -23,6 +23,8 @@ source "/opt/bats/lib/load.bash"
 setup() {
 # shellcheck source=scripts/in_container/_in_container_utils.sh
     source "${AIRFLOW_SOURCES}/scripts/in_container/_in_container_utils.sh"
+
+    in_container_set_colors
 }
 
 @test "test in_container" {


[airflow] 19/44: Changes release image preparation to use PyPI packages (#12990)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit a7a67290648a6f86d41b8b6fd0dda645769c608b
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sat Dec 12 12:01:58 2020 +0100

    Changes release image preparation to use PyPI packages (#12990)
    
    * Changes release image preparation to use PyPI packages
    
    Since we have now released all the provider packages to PyPI in
    RC versions, we can change the mechanism to prepare the
    production image to use released packages in case of tagged builds.
    
    The "branch" production images are still prepared using the
    CI images and .whl packages built from sources, but the
    release packages are built from officially released PyPI
    packages.
    
    Also some corrections and updates were made to the release process:
    
    * the constraint tags, when an RC candidate is sent out, should
      contain the rcN suffix.
    
    * there was a missing step about pushing the release tag once the
      release is out
    
    * pushing tag to GitHub should be done after the PyPI packages
      are uploaded, so that automated image building in DockerHub
      can use those packages.
    
    * added a note that in case we release some provider
      packages that depend on the just-released airflow version,
      they should be released after airflow is in PyPI but before
      the tag is pushed to GitHub (also to allow the image to be
      built automatically from the released packages)
    
    Fixes: #12970
    
    * Update dev/README_RELEASE_AIRFLOW.md
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    
    * Update dev/README_RELEASE_AIRFLOW.md
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    
    Co-authored-by: Ash Berlin-Taylor <as...@firemirror.com>
    (cherry picked from commit db027735a7ee03d2678c8ca8712dfd75de148261)
---
 BREEZE.rst                                  |    4 +-
 Dockerfile                                  |   62 +-
 Dockerfile.ci                               |    1 +
 IMAGES.rst                                  |    4 +-
 breeze-complete                             |    3 +-
 dev/README.md                               | 1511 +--------------------------
 dev/README_RELEASE_AIRFLOW.md               |  733 +++++++++++++
 docs/production-deployment.rst              |  232 ++--
 scripts/ci/images/ci_build_dockerhub.sh     |   95 +-
 scripts/ci/libraries/_build_images.sh       |   68 +-
 scripts/ci/libraries/_initialization.sh     |   60 +-
 scripts/ci/libraries/_parameters.sh         |    1 -
 scripts/in_container/_in_container_utils.sh |  278 ++++-
 13 files changed, 1276 insertions(+), 1776 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index 1ed3cfe..633fb4d 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1254,7 +1254,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.14 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 wheel none
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
@@ -2209,7 +2209,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.14 1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 wheel none
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
diff --git a/Dockerfile b/Dockerfile
index 23a9915..eecc683 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -167,15 +167,17 @@ ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}
 ENV PATH=${PATH}:/root/.local/bin
 RUN mkdir -p /root/.local/bin
 
-ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
-ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
-
 RUN if [[ -f /docker-context-files/.pypirc ]]; then \
         cp /docker-context-files/.pypirc /root/.pypirc; \
     fi
 
 RUN pip install --upgrade "pip==${PIP_VERSION}"
 
+# By default we do not use pre-cached packages, but in the CI/Breeze environment we override this to speed up
+# builds when setup.py/setup.cfg change. This is purely an optimisation of CI/Breeze builds.
+ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
+ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
+
 # In case of Production build image segment we want to pre-install master version of airflow
 # dependencies from GitHub so that we do not have to always reinstall it from the scratch.
 RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" ]]; then \
@@ -188,10 +190,13 @@ RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" ]]; then \
           && pip uninstall --yes apache-airflow; \
     fi
 
-ARG AIRFLOW_SOURCES_FROM="."
+# By default we install the latest airflow from PyPI, so we do not need to copy the sources of Airflow,
+# but in case of Breeze/CI builds we use the latest sources and override
+# SOURCES_FROM/TO with "." and "/opt/airflow" respectively
+ARG AIRFLOW_SOURCES_FROM="empty"
 ENV AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM}
 
-ARG AIRFLOW_SOURCES_TO="/opt/airflow"
+ARG AIRFLOW_SOURCES_TO="/empty"
 ENV AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO}
 
 COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO}
@@ -199,28 +204,41 @@ COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO}
 ARG CASS_DRIVER_BUILD_CONCURRENCY
 ENV CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY}
 
+# This is the airflow version that is put in the label of the image
 ARG AIRFLOW_VERSION
 ENV AIRFLOW_VERSION=${AIRFLOW_VERSION}
 
 ARG ADDITIONAL_PYTHON_DEPS=""
 ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS}
 
-ARG AIRFLOW_INSTALL_SOURCES="."
-ENV AIRFLOW_INSTALL_SOURCES=${AIRFLOW_INSTALL_SOURCES}
+# Determines the way airflow is installed. By default we install airflow from PyPI's `apache-airflow` package,
+# but it can also be `.` for a local installation, or a GitHub URL pointing to a specific branch or tag
+# of Airflow. Note that for a local source installation you need to have the local sources of
+# Airflow checked out together with the Dockerfile, and AIRFLOW_SOURCES_FROM and AIRFLOW_SOURCES_TO
+# set to "." and "/opt/airflow" respectively.
+ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
+ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}
 
+# By default the latest released version of airflow is installed (when this value is empty), but it can be overridden
+# and we can install a specific version of airflow this way.
 ARG AIRFLOW_INSTALL_VERSION=""
 ENV AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION}
 
-ARG AIRFLOW_LOCAL_PIP_WHEELS=""
-ENV AIRFLOW_LOCAL_PIP_WHEELS=${AIRFLOW_LOCAL_PIP_WHEELS}
-
-ARG INSTALL_AIRFLOW_VIA_PIP="true"
-ENV INSTALL_AIRFLOW_VIA_PIP=${INSTALL_AIRFLOW_VIA_PIP}
-
-ARG SLUGIFY_USES_TEXT_UNIDECODE=""
-ENV SLUGIFY_USES_TEXT_UNIDECODE=${SLUGIFY_USES_TEXT_UNIDECODE}
-
-ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
+# We can set this value to true in case we want to install .whl or .tar.gz packages placed in the
+# docker-context-files folder. This can be done both for additional packages you want to install
+# and for airflow itself (you have to set INSTALL_FROM_PYPI to false in this case)
+ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
+ENV INSTALL_FROM_DOCKER_CONTEXT_FILES=${INSTALL_FROM_DOCKER_CONTEXT_FILES}
+
+# By default we install the latest airflow from PyPI. You can set it to false if you want to install
+# Airflow from the .whl or .tar.gz packages placed in the `docker-context-files` folder.
+ARG INSTALL_FROM_PYPI="true"
+ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI}
+
+# By default we install providers from PyPI, but in case of Breeze builds we want to install providers
+# from local sources without the need to prepare provider packages upfront. This value is
+# automatically overridden by the Breeze scripts.
+ARG INSTALL_PROVIDERS_FROM_SOURCES="false"
 ENV INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES}
 
 WORKDIR /opt/airflow
@@ -229,16 +247,16 @@ WORKDIR /opt/airflow
 RUN if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then \
         AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}; \
     fi; \
-    if [[ ${INSTALL_AIRFLOW_VIA_PIP} == "true" ]]; then \
-        pip install --user "${AIRFLOW_INSTALL_SOURCES}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \
+    if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
+        pip install --user "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \
             --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"; \
     fi; \
     if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
         pip install --user ${ADDITIONAL_PYTHON_DEPS} --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"; \
     fi; \
-    if [[ ${AIRFLOW_LOCAL_PIP_WHEELS} == "true" ]]; then \
+    if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
         if ls /docker-context-files/*.whl 1> /dev/null 2>&1; then \
-            pip install --user --no-deps /docker-context-files/*.whl; \
+            pip install --user --no-deps /docker-context-files/*.{whl,tar.gz}; \
         fi ; \
     fi; \
     find /root/.local/ -name '*.pyc' -print0 | xargs -0 rm -r || true ; \
@@ -273,6 +291,7 @@ LABEL org.apache.airflow.distro="debian" \
   org.apache.airflow.module="airflow" \
   org.apache.airflow.component="airflow" \
   org.apache.airflow.image="airflow-build-image" \
+  org.apache.airflow.version="${AIRFLOW_VERSION}" \
   org.apache.airflow.buildImage.buildId=${BUILD_ID} \
   org.apache.airflow.buildImage.commitSha=${COMMIT_SHA}
 
@@ -434,6 +453,7 @@ LABEL org.apache.airflow.distro="debian" \
   org.apache.airflow.module="airflow" \
   org.apache.airflow.component="airflow" \
   org.apache.airflow.image="airflow" \
+  org.apache.airflow.version="${AIRFLOW_VERSION}" \
   org.apache.airflow.uid="${AIRFLOW_UID}" \
   org.apache.airflow.gid="${AIRFLOW_GID}" \
   org.apache.airflow.mainImage.buildId=${BUILD_ID} \
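
Note: with the build args introduced above, the production image can be built either from
PyPI (the new defaults) or from packages dropped into docker-context-files. An illustrative
invocation for the latter case (argument values are examples only, not taken from this patch):

```shell script
docker build . \
    --build-arg INSTALL_FROM_PYPI="false" \
    --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \
    --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
```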
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 32b9383..2210989 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -394,6 +394,7 @@ LABEL org.apache.airflow.distro="debian" \
   org.apache.airflow.module="airflow" \
   org.apache.airflow.component="airflow" \
   org.apache.airflow.image="airflow-ci" \
+  org.apache.airflow.version="${AIRFLOW_VERSION}" \
   org.apache.airflow.uid="0" \
   org.apache.airflow.gid="0" \
   org.apache.airflow.buildId=${BUILD_ID} \
diff --git a/IMAGES.rst b/IMAGES.rst
index 94ad6fd..8804d1f 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -310,7 +310,7 @@ additional apt dev and runtime dependencies.
   docker build . -f Dockerfile.ci \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
@@ -345,7 +345,7 @@ based on example in `this comment <https://github.com/apache/airflow/issues/8605
   docker build . -f Dockerfile.ci \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
diff --git a/breeze-complete b/breeze-complete
index 5502eec..819938b 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -52,13 +52,12 @@ _breeze_allowed_package_formats="wheel sdist both"
 
 _breeze_allowed_install_airflow_versions=$(cat <<-EOF
 1.10.14
-1.10.13
 1.10.12
 1.10.11
 1.10.10
 1.10.9
-wheel
 none
+wheel
 EOF
 )
 
diff --git a/dev/README.md b/dev/README.md
index b5ad9a3..805e976 100644
--- a/dev/README.md
+++ b/dev/README.md
@@ -15,6 +15,25 @@
  KIND, either express or implied.  See the License for the
  specific language governing permissions and limitations
  under the License.
+ -->
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
 -->
 <!-- START doctoc generated TOC please keep comment here to allow auto update -->
 <!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
@@ -22,21 +41,10 @@
 
 - [Apache Airflow source releases](#apache-airflow-source-releases)
   - [Apache Airflow Package](#apache-airflow-package)
-  - [Backport Provider packages](#backport-provider-packages)
 - [Prerequisites for the release manager preparing the release](#prerequisites-for-the-release-manager-preparing-the-release)
   - [Upload Public keys to id.apache.org](#upload-public-keys-to-idapacheorg)
   - [Configure PyPI uploads](#configure-pypi-uploads)
   - [Hardware used to prepare and verify the packages](#hardware-used-to-prepare-and-verify-the-packages)
-- [Apache Airflow packages](#apache-airflow-packages)
-  - [Prepare the Apache Airflow Package RC](#prepare-the-apache-airflow-package-rc)
-  - [Vote and verify the Apache Airflow release candidate](#vote-and-verify-the-apache-airflow-release-candidate)
-  - [Publish the final Apache Airflow release](#publish-the-final-apache-airflow-release)
-- [Provider Packages](#provider-packages)
-  - [Decide when to release](#decide-when-to-release)
-  - [Prepare the Backport Provider Packages RC](#prepare-the-backport-provider-packages-rc)
-  - [Vote and verify the Backport Providers release candidate](#vote-and-verify-the-backport-providers-release-candidate)
-  - [Publish the final releases of backport packages](#publish-the-final-releases-of-backport-packages)
-  - [Prepare the Regular Provider Packages Alpha](#prepare-the-regular-provider-packages-alpha)
 
 <!-- END doctoc generated TOC please keep comment here to allow auto update -->
 
@@ -52,11 +60,11 @@ The Apache Airflow releases are one of the two types:
 This package contains sources that allow the user building fully-functional Apache Airflow 2.0 package.
 They contain sources for:
 
- * "apache-airflow" python package that installs "airflow" Python package and includes
-   all the assets required to release the webserver UI coming with Apache Airflow
- * Dockerfile and corresponding scripts that build and use an official DockerImage
- * Breeze development environment that helps with building images and testing locally
-   apache airflow built from sources
+* "apache-airflow" python package that installs "airflow" Python package and includes
+  all the assets required to release the webserver UI coming with Apache Airflow
+* Dockerfile and corresponding scripts that build and use an official DockerImage
+* Breeze development environment that helps with building images and testing locally
+  apache airflow built from sources
 
 In the future (Airflow 2.0) this package will be split into separate "core" and "providers" packages that
 will be distributed separately, following the mechanisms introduced in Backport Package Providers. We also
@@ -76,38 +84,8 @@ cannot or do not want to build the packages themselves can use them as a conveni
 Apache Airflow, however they are not considered as "official source releases". You can read more
 details about it in the [ASF Release Policy](http://www.apache.org/legal/release-policy.html).
 
-This document describes the process of releasing both - official source packages and convenience
-packages for Apache Airflow packages.
-
-## Backport Provider packages
-
-The Backport Provider packages are packages (per provider) that make it possible to easily use Hooks,
-Operators, Sensors, and Secrets from the 2.0 version of Airflow in the 1.10.* series.
-
-Once you release the packages, you can simply install them with:
-
-```
-pip install apache-airflow-backport-providers-<PROVIDER>[<EXTRAS>]
-```
-
-Where `<PROVIDER>` is the provider id and `<EXTRAS>` are optional extra packages to install.
-You can find the provider packages dependencies and extras in the README.md files in each provider
-package (in `airflow/providers/<PROVIDER>` folder) as well as in the PyPI installation page.
-
-Backport providers are a great way to migrate your DAGs to Airflow-2.0 compatible DAGs. You can
-switch to the new Airflow-2.0 packages in your DAGs, long before you attempt to migrate
-airflow to 2.0 line.
-
-The sources released in SVN allow to build all the provider packages by the user, following the
-instructions and scripts provided. Those are also "official_source releases" as described in the
-[ASF Release Policy](http://www.apache.org/legal/release-policy.html) and they are available
-via [Official Apache Download sources](https://downloads.apache.org/airflow/backport-providers/).
-
-There are also 50+ convenience packages released as "apache-airflow-backport-providers" separately in
-PyPI. You can find them all by [PyPI query](https://pypi.org/search/?q=apache-airflow-backport-providers)
-
-The document describes the process of releasing both - official source packages and convenience
-packages for Backport Provider Packages.
+Detailed instructions for releasing Provider Packages can be found in
+[README_RELEASE_AIRFLOW.md](README_RELEASE_AIRFLOW.md)
 
 # Prerequisites for the release manager preparing the release
 
@@ -177,7 +155,7 @@ password=<API Upload Token>
 Set proper permissions for the pypirc file:
 
 ```shell script
-$ chmod 600 ~/.pypirc
+chmod 600 ~/.pypirc
 ```
 
 - Install [twine](https://pypi.org/project/twine/) if you do not have it already (it can be done
@@ -190,9 +168,7 @@ pip install twine
 (more details [here](https://peterdowns.com/posts/first-time-with-pypi.html).)
 
 - Set proper permissions for the pypirc file:
-`$ chmod 600 ~/.pypirc`
-
-- Confirm that `airflow/version.py` is set properly.
+  `$ chmod 600 ~/.pypirc`
 
 
 ## Hardware used to prepare and verify the packages
@@ -202,1434 +178,3 @@ by the committer acting as release manager. While strictly speaking, releases mu
 on hardware owned and controlled by the committer, for practical reasons it's best if the packages are
 prepared using such hardware. More information can be found in this
 [FAQ](http://www.apache.org/legal/release-policy.html#owned-controlled-hardware)
-
-# Apache Airflow packages
-
-## Prepare the Apache Airflow Package RC
-
-### Build RC artifacts (both source packages and convenience packages)
-
-The Release Candidate artifacts we vote upon should be the exact ones we vote against, without any modification other than renaming, i.e. the contents of the files must be the same between the voted release candidate and the final release. Because of this, the version in the built artifacts that will become the official Apache releases must not include the rcN suffix.
-
-- Set environment variables
-
-    ```shell script
-    # Set Version
-    export VERSION=1.10.2rc3
-
-
-    # Set AIRFLOW_REPO_ROOT to the path of your git repo
-    export AIRFLOW_REPO_ROOT=$(pwd)
-
-
-    # Example after cloning
-    git clone https://github.com/apache/airflow.git airflow
-    cd airflow
-    export AIRFLOW_REPO_ROOT=$(pwd)
-    ```
-
-- Set your version to 1.10.2 in `airflow/version.py` (without the RC tag)
-- Commit the version change.
-
-- Tag your release
-
-    ```shell script
-    git tag -s ${VERSION}
-    ```
-
-- Clean the checkout: the sdist step below will otherwise include any untracked or changed files
-
-    ```shell script
-    git clean -fxd
-    ```
-
-- Tarball the repo
-
-    ```shell script
-    git archive --format=tar.gz ${VERSION} --prefix=apache-airflow-${VERSION}/ -o apache-airflow-${VERSION}-source.tar.gz
-    ```
-
-
-- Generate sdist
-
-    NOTE: Make sure your checkout is clean at this stage - any untracked or changed files will otherwise be included
-     in the file produced.
-
-    ```shell script
-    python setup.py compile_assets sdist bdist_wheel
-    ```
-
-- Rename the sdist
-
-    ```shell script
-    mv dist/apache-airflow-${VERSION%rc?}.tar.gz apache-airflow-${VERSION}-bin.tar.gz
-    mv dist/apache_airflow-${VERSION%rc?}-py2.py3-none-any.whl apache_airflow-${VERSION}-py2.py3-none-any.whl
-    ```
-
-- Generate SHA512/ASC (If you have not generated a key yet, generate it by following instructions on http://www.apache.org/dev/openpgp.html#key-gen-generate-key)
-
-    ```shell script
-    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-source.tar.gz
-    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-bin.tar.gz
-    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache_airflow-${VERSION}-py2.py3-none-any.whl
-    ```
-
-- Push Tags
-
-    ```shell script
-    git push origin ${VERSION}
-    ```
-
-- Push the artifacts to ASF dev dist repo
-```
-# First clone the repo
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# Create new folder for the release
-cd airflow-dev
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder & commit
-mv ${AIRFLOW_REPO_ROOT}/apache{-,_}airflow-${VERSION}* ${VERSION}/
-cd ${VERSION}
-svn add *
-svn commit -m "Add artifacts for Airflow ${VERSION}"
-```
-
-### Prepare PyPI convenience "snapshot" packages
-
-At this point we have the artefact that we vote on, but as a convenience to developers we also want to
-publish "snapshots" of the RC builds to pypi for installing via pip. To do this we need to
-
-- Edit the `airflow/version.py` to include the RC suffix.
-
-- Build the package:
-
-    ```shell script
-    python setup.py compile_assets sdist bdist_wheel
-    ```
-
-- Verify the artifacts that would be uploaded:
-
-    ```shell script
-    twine check dist/*
-    ```
-
-- Upload the package to PyPi's test environment:
-
-    ```shell script
-    twine upload -r pypitest dist/*
-    ```
-
-- Verify that the test package looks good by downloading it and installing it into a virtual environment. The package download link is available at:
-https://test.pypi.org/project/apache-airflow/#files
-
-- Upload the package to PyPi's production environment:
-`twine upload -r pypi dist/*`
-
-- Again, confirm that the package is available here:
-https://pypi.python.org/pypi/apache-airflow
-
-- Throw away the change - we don't want to commit this: `git checkout airflow/version.py`
-
-It is important to stress that this snapshot should not be named "release", and it
-is not supposed to be used by, or advertised to, end-users who do not read the devlist.
-
-## Vote and verify the Apache Airflow release candidate
-
-### Prepare Vote email on the Apache Airflow release candidate
-
-- Use the dev/airflow-jira script to generate a list of Airflow JIRAs that were closed in the release.
-
-- Send out a vote to the dev@airflow.apache.org mailing list:
-
-Subject:
-```
-[VOTE] Airflow 1.10.2rc3
-```
-
-Body:
-
-```
-Hey all,
-
-I have cut Airflow 1.10.2 RC3. This email is calling a vote on the release,
-which will last for 72 hours. Consider this my (binding) +1.
-
-Airflow 1.10.2 RC3 is available at:
-https://dist.apache.org/repos/dist/dev/airflow/1.10.2rc3/
-
-*apache-airflow-1.10.2rc3-source.tar.gz* is a source release that comes
-with INSTALL instructions.
-*apache-airflow-1.10.2rc3-bin.tar.gz* is the binary Python "sdist" release.
-
-Public keys are available at:
-https://dist.apache.org/repos/dist/release/airflow/KEYS
-
-Only votes from PMC members are binding, but the release manager should encourage members of the community
-to test the release and vote with "(non-binding)".
-
-The test procedure for PMCs and Contributors who would like to test this RC are described in
-https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-apache-airflow-release-candidate
-
-Please note that the version number excludes the `rcX` string, so it's now
-simply 1.10.2. This will allow us to rename the artifact without modifying
-the artifact checksums when we actually release.
-
-
-Changes since 1.10.2rc2:
-*Bugs*:
-[AIRFLOW-3732] Fix issue when trying to edit connection in RBAC UI
-[AIRFLOW-2866] Fix missing CSRF token head when using RBAC UI (#3804)
-...
-
-
-*Improvements*:
-[AIRFLOW-3302] Small CSS fixes (#4140)
-[Airflow-2766] Respect shared datetime across tabs
-...
-
-
-*New features*:
-[AIRFLOW-2874] Enables FAB's theme support (#3719)
-[AIRFLOW-3336] Add new TriggerRule for 0 upstream failures (#4182)
-...
-
-
-*Doc-only Change*:
-[AIRFLOW-XXX] Fix BashOperator Docstring (#4052)
-[AIRFLOW-3018] Fix Minor issues in Documentation
-...
-
-Cheers,
-<your name>
-```
-
-### Verify the release candidate by PMCs (legal)
-
-#### PMC responsibilities
-
-The PMCs should verify the releases in order to make sure the release is following the
-[Apache Legal Release Policy](http://www.apache.org/legal/release-policy.html).
-
-At least 3 (+1) votes should be recorded in accordance to
-[Votes on Package Releases](https://www.apache.org/foundation/voting.html#ReleaseVotes)
-
-The legal checks include:
-
-* checking if the packages are present in the right dist folder on svn
-* verifying if all the sources have correct licences
-* verifying if the release manager signed the releases with the right key
-* verifying if all the checksums are valid for the release
-
-#### SVN check
-
-The files should be present in the sub-folder of
-[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/)
-
-The following files should be present (9 files):
-
-* -bin-tar.gz + .asc + .sha512
-* -source.tar.gz + .asc + .sha512
-* -.whl + .asc + .sha512
-
-As a PMC you should be able to clone the SVN repository:
-
-```shell script
-svn co https://dist.apache.org/repos/dist/dev/airflow
-```
-
-Or update it if you already checked it out:
-
-```shell script
-svn update .
-```
-
-#### Verify the licences
-
-This can be done with the Apache RAT tool.
-
-* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
-  the jar is inside)
-* Unpack the -source.tar.gz to a folder
-* Enter the folder and run the check (point to the place where you extracted the .jar)
-
-```shell script
-java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
-```
-
-#### Verify the signatures
-
-Make sure you have imported the signer's key into your GPG keyring. You can find the valid keys in
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS).
-
-You can import the whole KEYS file:
-
-```shell script
-gpg --import KEYS
-```
-
-You can also import the keys individually from a keyserver. The below one uses Kaxil's key and
-retrieves it from the default GPG keyserver
-[OpenPGP.org](https://keys.openpgp.org):
-
-```shell script
-gpg --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-You should choose to import the key when asked.
-
-Note that, being the default, the OpenPGP server tends to be overloaded and might respond with
-errors or timeouts. Many of the release managers have also uploaded their keys to the
-[GNUPG.net](https://keys.gnupg.net) keyserver, and you can retrieve them from there.
-
-```shell script
-gpg --keyserver keys.gnupg.net --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-Once you have the keys, the signatures can be verified by running this:
-
-```shell script
-for i in *.asc
-do
-   echo "Checking $i"; gpg --verify `basename $i .sha512 `
-done
-```
-
-This should produce results similar to the below. The "Good signature from ..." is an indication
-that the signatures are correct. Do not worry about the "not certified with a trusted signature"
-warning. Most of the certificates used by release managers are self-signed, which is why you get this
-warning. By importing the key from the keyserver in the previous step, and verifying its ID against the
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS) page, you know that
-this is a valid key already.
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-bin.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:28 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.asc
-gpg: assuming signed data in 'apache_airflow-1.10.12rc4-py2.py3-none-any.whl'
-gpg: Signature made sob, 22 sie 2020, 20:28:31 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache-airflow-1.10.12rc4-source.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-source.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:25 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-```
-
-#### Verify the SHA512 sum
-
-Run this:
-
-```shell script
-for i in *.sha512
-do
-    echo "Checking $i"; gpg --print-md SHA512 `basename $i .sha512 ` | diff - $i
-done
-```
-
-You should get output similar to:
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
-Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
-```
-
-### Verify if the release candidate "works" by Contributors
-
-This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
-actual users of Apache Airflow test it in their own staging/test installations. Each release candidate
-is available on PyPI apart from the SVN packages, so everyone should be able to install
-the release candidate version of Airflow by simply running the command below (<VERSION> is, for
-example, 1.10.12, and <X> is the release candidate number 1, 2, 3, ...):
-
-```shell script
-pip install apache-airflow==<VERSION>rc<X>
-```
-Optionally, it can be combined with constraints:
-
-```shell script
-pip install apache-airflow==<VERSION>rc<X> \
-  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-<VERSION>/constraints-3.6.txt"
-```
-
-Note that the constraints file is specific to the Python version that you are installing with.
-
-You can use any of the installation methods you prefer (you can even install it via the binary wheel
-downloaded from the SVN).
-
-There is also an easy way of installation with Breeze if you have the latest sources of Apache Airflow.
-Running the following command will use tmux inside breeze, create an `admin` user and run the Webserver & Scheduler:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres
-```
-
-For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres --no-rbac-ui
-```
-
-Once you install and run Airflow, you should perform any verification you see as necessary to check
-that Airflow works as you expect.
-
-## Publish the final Apache Airflow release
-
-### Summarize the voting for the Apache Airflow release
-
-Once the vote has been passed, you will need to send a result vote to dev@airflow.apache.org:
-
-Subject:
-```
-[RESULT][VOTE] Airflow 1.10.2rc3
-```
-
-Message:
-
-```
-Hello,
-
-Apache Airflow 1.10.2 (based on RC3) has been accepted.
-
-4 “+1” binding votes received:
-- Kaxil Naik  (binding)
-- Bolke de Bruin (binding)
-- Ash Berlin-Taylor (binding)
-- Tao Feng (binding)
-
-
-4 "+1" non-binding votes received:
-
-- Deng Xiaodong (non-binding)
-- Stefan Seelmann (non-binding)
-- Joshua Patchus (non-binding)
-- Felix Uellendall (non-binding)
-
-Vote thread:
-https://lists.apache.org/thread.html/736404ca3d2b2143b296d0910630b9bd0f8b56a0c54e3a05f4c8b5fe@%3Cdev.airflow.apache.org%3E
-
-I'll continue with the release process, and the release announcement will follow shortly.
-
-Cheers,
-<your name>
-```
-
-
-### Publish release to SVN
-
-You need to migrate the RC artifacts that passed to this repository:
-https://dist.apache.org/repos/dist/release/airflow/
-(The migration should include renaming the files so that they no longer have the RC number in their filenames.)
-
-The best way of doing this is to svn cp between the two repos (this avoids having to upload the binaries again, and gives a clearer history in the svn commit logs):
-
-```shell script
-# First clone the repo
-export RC=1.10.4rc5
-export VERSION=${RC/rc?/}
-svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
-
-# Create new folder for the release
-cd airflow-release
-svn mkdir ${VERSION}
-cd ${VERSION}
-
-# Move the artifacts to svn folder & commit
-for f in ../../airflow-dev/$RC/*; do svn cp $f ${$(basename $f)/rc?/}; done
-svn commit -m "Release Airflow ${VERSION} from ${RC}"
-
-# Remove old release
-# http://www.apache.org/legal/release-policy.html#when-to-archive
-cd ..
-export PREVIOUS_VERSION=1.10.1
-svn rm ${PREVIOUS_VERSION}
-svn commit -m "Remove old release: ${PREVIOUS_VERSION}"
-```
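
Note: the rename loop above uses ${$(basename $f)/rc?/}, which is not valid bash - pattern
substitution cannot be applied directly to a command substitution. A working equivalent
would capture the basename in a variable first, for example:

```shell script
# Illustrative fix: capture the basename, then strip the rcN suffix.
for f in ../../airflow-dev/"${RC}"/*; do
    base="$(basename "$f")"
    svn cp "$f" "${base/rc?/}"
done
```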
-
-Verify that the packages appear in [airflow](https://dist.apache.org/repos/dist/release/airflow/)
-
-### Prepare PyPI "release" packages
-
-At this point we release an official package:
-
-- Build the package:
-
-    ```shell script
-    python setup.py compile_assets sdist bdist_wheel
-    ```
-
-- Verify the artifacts that would be uploaded:
-
-    ```shell script
-    twine check dist/*
-    ```
-
-- Upload the package to PyPi's test environment:
-
-    ```shell script
-    twine upload -r pypitest dist/*
-    ```
-
-- Verify that the test package looks good by downloading it and installing it into a virtual environment.
-    The package download link is available at: https://test.pypi.org/project/apache-airflow/#files
-
-- Upload the package to PyPi's production environment:
-
-    ```shell script
-    twine upload -r pypi dist/*
-    ```
-
-- Again, confirm that the package is available here: https://pypi.python.org/pypi/apache-airflow
-
-### Update CHANGELOG.md
-
-- Get a diff between the last version and the current version:
-
-    ```shell script
-    $ git log 1.8.0..1.9.0 --pretty=oneline
-    ```
-- Update CHANGELOG.md with the details, and commit it.
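
An illustrative way to commit that change (commands assumed, not prescribed by this guide):

```shell script
git add CHANGELOG.md
git commit -m "Update CHANGELOG for 1.9.0"
```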
-
-### Notify developers of release
-
-- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
-the artifacts have been published:
-
-Subject:
-```shell script
-cat <<EOF
-Airflow ${VERSION} is released
-EOF
-```
-
-Body:
-```shell script
-cat <<EOF
-Dear Airflow community,
-
-I'm happy to announce that Airflow ${VERSION} was just released.
-
-The source release, as well as the binary "sdist" release, are available
-here:
-
-https://dist.apache.org/repos/dist/release/airflow/${VERSION}/
-
-We also made this version available on PyPi for convenience (`pip install apache-airflow`):
-
-https://pypi.python.org/pypi/apache-airflow
-
-The documentation is available on:
-https://airflow.apache.org/
-https://airflow.apache.org/1.10.2/
-https://airflow.readthedocs.io/en/1.10.2/
-https://airflow.readthedocs.io/en/stable/
-
-Find the CHANGELOG here for more details:
-
-https://airflow.apache.org/changelog.html#airflow-1-10-2-2019-01-19
-
-Cheers,
-<your name>
-EOF
-```
-
-### Update Announcements page
-
-Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
-
-
------------------------------------------------------------------------------------------------------------
-
-
-# Provider Packages
-
-You can read more about the command line tools used to generate the packages and the two types of
-packages we have (Backport and Regular Provider Packages) in [Provider packages](PROVIDER_PACKAGES.md).
-
-## Decide when to release
-
-You can release provider packages separately from the main Airflow on an ad-hoc basis, whenever we find that
-a given provider needs to be released - due to new features or due to bug fixes.
-You can release each provider package separately, but due to voting and release overhead we try to group
-releases of provider packages together.
-
-### Backport provider packages versioning
-
-We are using the [CALVER](https://calver.org/) versioning scheme for the backport packages. We also have an
-automated way to prepare and build the packages, so it should be very easy to release the packages often and
-separately. Backport packages will be maintained for three months after the 2.0.0 version of Airflow; they are
-really a bridge, allowing people to migrate to Airflow 2.0 in stages, so the overhead of maintaining
-semver versioning does not apply there: subsequent releases might be backward-incompatible, and this is
-not indicated by the version of the packages.
-
-### Regular provider packages versioning
-
-We are using the [SEMVER](https://semver.org/) versioning scheme for the regular packages. This is in order
-to give the users confidence about maintaining backwards compatibility in the new releases of those
-packages.
-
-Details about maintaining the SEMVER version are going to be discussed and implemented in
-[the related issue](https://github.com/apache/airflow/issues/11425)
-
-## Prepare the Backport Provider Packages RC
-
-### Generate release notes
-
-Prepare release notes for all the packages you plan to release. Where YYYY.MM.DD is the CALVER
-date for the packages.
-
-```shell script
-./breeze --backports prepare-provider-readme YYYY.MM.DD [packages]
-```
-
-If you iterate with merges and release candidates, you can update the existing release notes by
-running the command without providing the date:
-
-```shell script
-./breeze --backports prepare-provider-readme google
-```
-
-Generated readme files should be eventually committed to the repository.
-
-### Build an RC release for SVN apache upload
-
-The Release Candidate artifacts we vote upon should be the exact ones we vote against, without any
-modification than renaming i.e. the contents of the files must be the same between voted
-release candidate and final release. Because of this the version in the built artifacts
-that will become the official Apache releases must not include the rcN suffix. They also need
-to be signed and have checksum files. You can generate the checksum/signature files by running
-the "dev/sign.sh" script (assuming you have the right PGP key set-up for signing). The script
-generates corresponding .asc and .sha512 files for each file to sign.
-
-#### Build and sign the source and convenience packages
-
-* Set environment variables (version and root of airflow repo)
-
-```shell script
-export VERSION=2020.5.20rc2
-export AIRFLOW_REPO_ROOT=$(pwd)
-
-```
-
-* Build the source package:
-
-```shell script
-./provider_packages/build_source_package.sh --backports
-```
-
-It will generate `apache-airflow-backport-providers-${VERSION}-source.tar.gz`
-
-* Generate the packages - since we are preparing packages for SVN repo, we should use the right switch. Note
-  that this will clean up dist folder before generating the packages, so it will only contain the packages
-  you intended to build.
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-svn rc1
-```
-
-if you only build a few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-svn rc1 PACKAGE PACKAGE ....
-```
-
-* Move the source tarball to dist folder
-
-```shell script
-mv apache-airflow-backport-providers-${VERSION}-source.tar.gz dist
-```
-
-* Sign all your packages
-
-```shell script
-pushd dist
-../dev/sign.sh *
-popd
-```
-
-* Push tags to Apache repository (assuming that you have apache remote pointing to apache/airflow repo)
-
-```shell script
-git push apache backport-providers-${VERSION}
-```
-
-#### Commit the source packages to Apache SVN repo
-
-* Push the artifacts to ASF dev dist repo
-
-```shell script
-# First clone the repo if you do not have it
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# update the repo in case you have it already
-cd airflow-dev
-svn update
-
-# Create a new folder for the release.
-cd airflow-dev/backport-providers
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder
-mv ${AIRFLOW_REPO_ROOT}/dist/* ${VERSION}/
-
-# Add and commit
-svn add ${VERSION}/*
-svn commit -m "Add artifacts for Airflow ${VERSION}"
-
-cd ${AIRFLOW_REPO_ROOT}
-```
-
-Verify that the files are available at
-[backport-providers](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)
-
-### Publish the RC convenience package to PyPI
-
-In order to publish to PyPI you just need to build and release packages. The packages should however
-contain the rcN suffix in the version name as well, so you need to use the `--version-suffix-for-pypi` switch
-to prepare those packages. Note that these are different packages than the ones used for SVN upload
-though they should be generated from the same sources.
-
-* Generate the packages with the right RC version (specify the version suffix with PyPI switch). Note that
-this will clean up dist folder before generating the packages, so you will only have the right packages there.
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-pypi rc1
-```
-
-if you only build a few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages --version-suffix-for-pypi rc1 PACKAGE PACKAGE ....
-```
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPi's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading and installing them into a virtual environment.
-Twine prints the package links as output, separately for each package.
-
-* Upload the package to PyPi's production environment:
-
-```shell script
-twine upload -r pypi dist/*
-```
-
-* Copy the list of links to the uploaded packages - they will be useful in preparing VOTE email.
-
-* Again, confirm that the packages are available under the links printed.
-
-## Vote and verify the Backport Providers release candidate
-
-### Prepare voting email for Backport Providers release candidate
-
-Make sure the packages are in https://dist.apache.org/repos/dist/dev/airflow/backport-providers/
-
-Send out a vote to the dev@airflow.apache.org mailing list. Here you can prepare text of the
-email using the ${VERSION} variable you already set in the command line.
-
-subject:
-
-
-```shell script
-cat <<EOF
-[VOTE] Airflow Backport Providers ${VERSION}
-EOF
-```
-
-```shell script
-cat <<EOF
-Hey all,
-
-I have cut Airflow Backport Providers ${VERSION}. This email is calling a vote on the release,
-which will last for 72 hours - which means that it will end on $(date -d '+3 days').
-
-Consider this my (binding) +1.
-
-Airflow Backport Providers ${VERSION} are available at:
-https://dist.apache.org/repos/dist/dev/airflow/backport-providers/${VERSION}/
-
-*apache-airflow-backport-providers-${VERSION}-source.tar.gz* is a source release that comes
- with INSTALL instructions.
-
-*apache-airflow-backport-providers-<PROVIDER>-${VERSION}-bin.tar.gz* are the binary
- Python "sdist" release.
-
-The test procedure for PMCs and Contributors who would like to test the RC candidates are described in
-https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-backport-providers-release-candidate
-
-
-Public keys are available at:
-https://dist.apache.org/repos/dist/release/airflow/KEYS
-
-Please vote accordingly:
-
-[ ] +1 approve
-[ ] +0 no opinion
-[ ] -1 disapprove with the reason
-
-
-Only votes from PMC members are binding, but members of the community are
-encouraged to test the release and vote with "(non-binding)".
-
-Please note that the version number excludes the 'rcX' string, so it's now
-simply ${VERSION%rc?}. This will allow us to rename the artifact without modifying
-the artifact checksums when we actually release.
-
-Each of the packages contains detailed changelog. Here is the list of links to
-the released packages and changelogs:
-
-<PASTE TWINE UPLOAD LINKS HERE. SORT THEM BEFORE!>
-
-Cheers,
-<TODO: Your Name>
-
-EOF
-```
-
-Due to the nature of backport packages, not all packages have to be released as convenience
-packages in the final release. During the voting process
-the voting PMCs might decide to exclude certain packages from the release if some critical
-problems have been found in some packages.
-
-Please modify the message above accordingly to clearly exclude those packages.
-
-### Verify the release
-
-#### SVN check
-
-The files should be present in the sub-folder of
-[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)
-
-The following files should be present (9 files):
-
-* -source.tar.gz + .asc + .sha512 (one set of files)
-* -bin-tar.gz + .asc + .sha512 (one set of files per provider)
-* -.whl + .asc + .sha512 (one set of files per provider)
-
-As a PMC you should be able to clone the SVN repository:
-
-```shell script
-svn co https://dist.apache.org/repos/dist/dev/airflow/
-```
-
-Or update it if you already checked it out:
-
-```shell script
-svn update .
-```
-
-#### Verify the licences
-
-This can be done with the Apache RAT tool.
-
-* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
-  the jar is inside)
-* Unpack the -source.tar.gz to a folder
-* Enter the folder and run the check (point to the place where you extracted the .jar)
-
-```shell script
-java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
-```
-
-#### Verify the signatures
-
-Make sure you have imported the signer's key into your GPG keyring. You can find the valid keys in
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS).
-
-You can import the whole KEYS file:
-
-```shell script
-gpg --import KEYS
-```
-
-You can also import the keys individually from a keyserver. The below one uses Kaxil's key and
-retrieves it from the default GPG keyserver
-[OpenPGP.org](https://keys.openpgp.org):
-
-```shell script
-gpg --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-You should choose to import the key when asked.
-
-Note that, being the default, the OpenPGP server tends to be overloaded and might respond with
-errors or timeouts. Many of the release managers have also uploaded their keys to the
-[GNUPG.net](https://keys.gnupg.net) keyserver, and you can retrieve them from there.
-
-```shell script
-gpg --keyserver keys.gnupg.net --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-```
-
-Once you have the keys, the signatures can be verified by running this:
-
-```shell script
-for i in *.asc
-do
-   echo "Checking $i"; gpg --verify `basename $i .sha512 `
-done
-```
-
-This should produce results similar to the below. The "Good signature from ..." is an indication
-that the signatures are correct. Do not worry about the "not certified with a trusted signature"
-warning. Most of the certificates used by release managers are self-signed, which is why you get this
-warning. By importing the key from the keyserver in the previous step, and verifying its ID against the
-[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS) page, you know that
-this is a valid key already.
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-bin.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:28 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.asc
-gpg: assuming signed data in 'apache_airflow-1.10.12rc4-py2.py3-none-any.whl'
-gpg: Signature made sob, 22 sie 2020, 20:28:31 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-Checking apache-airflow-1.10.12rc4-source.tar.gz.asc
-gpg: assuming signed data in 'apache-airflow-1.10.12rc4-source.tar.gz'
-gpg: Signature made sob, 22 sie 2020, 20:28:25 CEST
-gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
-gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
-gpg: WARNING: This key is not certified with a trusted signature!
-gpg:          There is no indication that the signature belongs to the owner.
-Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
-```
-
-#### Verify the SHA512 sum
-
-Run this:
-
-```shell script
-for i in *.sha512
-do
-    echo "Checking $i"; gpg --print-md SHA512 `basename $i .sha512 ` | diff - $i
-done
-```
-
-You should get output similar to:
-
-```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
-Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
-```
-
-### Verify if the Backport Packages release candidates "work" by Contributors
-
-This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
-actual users of Apache Airflow test it in their own staging/test installations. Each release candidate
-is available on PyPI apart from the SVN packages, so everyone should be able to install
-the release candidate version of Airflow (where <VERSION> is, for example, 1.10.12, and <X> is
-the release candidate number 1, 2, 3, ...).
-
-You can use any of the installation methods you prefer (you can even install it via the binary wheels
-downloaded from the SVN).
-
-
-#### Installing in your local virtualenv
-
-You have to make sure you have Airflow 1.10.* installed in your PIP virtualenv
-(the version you want to install providers with).
-
-```shell script
-pip install apache-airflow-backport-providers-<provider>==<VERSION>rc<X>
-```
-
-#### Installing with Breeze
-
-There is also an easy way of installation with Breeze if you have the latest sources of Apache Airflow.
-Here is a typical scenario.
-
-First copy all the provider packages .whl files to the `dist` folder.
-
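-For example (a sketch, assuming the wheels were downloaded from SVN into the current directory):
-
-```shell script
-cp ./apache_airflow_backport_providers_*.whl ./dist/
-```
-
-Then start Airflow, installing the wheels from `dist`:
-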
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> \
-    --python 3.7 --backend postgres --install-wheels
-```
-
-For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> \
-    --python 3.7 --backend postgres --install-wheels --no-rbac-ui
-```
-
-#### Building your own docker image
-
-If you prefer to build your own image, you can also use the official image and PyPI packages to test
-backport packages. This is especially helpful when you want to test integrations but need to install
-additional tools. Below is an example Dockerfile, which installs the backport providers for Google and
-additional third-party tools:
-
-```dockerfile
-FROM apache/airflow:1.10.12
-
-RUN pip install --user apache-airflow-backport-providers-google==2020.10.5.rc1
-
-RUN curl https://sdk.cloud.google.com | bash \
-    && echo "source /home/airflow/google-cloud-sdk/path.bash.inc" >> /home/airflow/.bashrc \
-    && echo "source /home/airflow/google-cloud-sdk/completion.bash.inc" >> /home/airflow/.bashrc
-
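-# Switch to root to install kubectl into /usr/local/bin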
-USER 0
-RUN KUBECTL_VERSION="$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)" \
-    && KUBECTL_URL="https://storage.googleapis.com/kubernetes-release/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl" \
-    && curl -L "${KUBECTL_URL}" --output /usr/local/bin/kubectl \
-    && chmod +x /usr/local/bin/kubectl
-
-USER ${AIRFLOW_UID}
-```
-
-To build the image and run a shell inside it, run:
-
-```shell script
-docker build . -t my-airflow
-docker run  -ti \
-    --rm \
-    -v "$PWD/data:/opt/airflow/" \
-    -v "$PWD/keys/:/keys/" \
-    -p 8080:8080 \
-    -e GOOGLE_APPLICATION_CREDENTIALS=/keys/sa.json \
-    -e AIRFLOW__CORE__LOAD_EXAMPLES=True \
-    my-airflow bash
-```
-
-#### Verification
-
-Once you install and run Airflow, you can perform any verification you see as necessary to check
-that Airflow works as you expect.
-
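-For example, you could run a couple of import checks (a sketch; the google package is an assumption):
-
-```shell script
-airflow version
-# assumes the google backport package is installed:
-python -c "import airflow.providers.google.cloud.operators.bigquery"
-```
-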
-## Publish the final releases of backport packages
-
-### Summarize the voting for the Backport Providers Release
-
-Once the vote has been passed, you will need to send the result of the vote to dev@airflow.apache.org:
-
-Subject:
-```shell script
-cat <<EOF
-[RESULT][VOTE] Airflow Backport Providers ${VERSION}
-EOF
-```
-
-Body:
-
-```shell script
-cat <<EOF
-
-Hey all,
-
-Airflow Backport Providers ${VERSION} (based on the ${VERSION_RC} candidate) has been accepted.
-
-N "+1" binding votes received:
-- PMC Member  (binding)
-...
-
-N "+1" non-binding votes received:
-
-- COMMITTER (non-binding)
-
-Vote thread:
-https://lists.apache.org/thread.html/<TODO:REPLACE_ME_WITH_THE_VOTING_THREAD>@%3Cdev.airflow.apache.org%3E
-
-I'll continue with the release process and the release announcement will follow shortly.
-
-Cheers,
-<TODO: Your Name>
-
-EOF
-
-```
-
-### Publish release to SVN
-
-The best way of doing this is to `svn cp` between the two repos (this avoids having to upload the binaries
-again, and gives a clearer history in the svn commit logs).
-
-We also need to archive older releases before copying the new ones, as required by the
-[Release policy](http://www.apache.org/legal/release-policy.html#when-to-archive).
-
-```shell script
-# Set the variables
-export VERSION_RC=2020.5.20rc2
-export VERSION=${VERSION_RC/rc?/}
-
-# Set AIRFLOW_REPO_ROOT to the path of your git repo
-export AIRFLOW_REPO_ROOT=$(pwd)
-
-# Go to the directory where you have checked out the dev svn release
-# And go to the sub-folder with RC candidates
-cd "<ROOT_OF_YOUR_DEV_REPO>/backport-providers/${VERSION_RC}"
-export SOURCE_DIR=$(pwd)
-
-# Go the folder where you have checked out the release repo
-# Clone it if it's not done yet
-svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
-
-# Update to latest version
-svn update
-
-# Create backport-providers folder if it does not exist
-# All latest releases are kept in this one folder without version sub-folder
-mkdir -pv backport-providers
-cd backport-providers
-
-# Move the artifacts to svn folder & remove the rc postfix
-for file in ${SOURCE_DIR}/*${VERSION_RC}*
-do
-  base_file=$(basename ${file})
-  svn cp "${file}" "${base_file/${VERSION_RC}/${VERSION}}"
-done
-
-
-# If some packages have been excluded, remove them now
-# Check the packages
-ls *<provider>*
-# Remove them
-svn rm *<provider>*
-
-# Check which old packages will be removed (you need python 3.6+)
-python ${AIRFLOW_REPO_ROOT}/provider_packages/remove_old_releases.py \
-    --directory .
-
-# Remove those packages
-python ${AIRFLOW_REPO_ROOT}/provider_packages/remove_old_releases.py \
-    --directory . --execute
-
-
-# Commit to SVN
-svn commit -m "Release Airflow Backport Providers ${VERSION} from ${VERSION_RC}"
-```
-
-Verify that the packages appear in
-[backport-providers](https://dist.apache.org/repos/dist/release/airflow/backport-providers)
-
-### Publish the final version convenience package to PyPI
-
-Checkout the RC Version:
-
-```shell script
-git checkout backport-providers-${VERSION_RC}
-```
-
-Tag and push the final version (providing that your apache remote is named 'apache'):
-
-```shell script
-git tag backport-providers-${VERSION}
-git push apache backport-providers-${VERSION}
-```
-
-In order to publish to PyPI you just need to build and release packages.
-
-* Generate the packages.
-
-```shell script
-./breeze --backports prepare-provider-packages
-```
-
-If you only want to build a few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages <PACKAGE> ...
-```
-
-If you decided to exclude some of the packages, remove them from the dist folder now:
-
-```shell script
-ls dist/*<provider>*
-rm dist/*<provider>*
-```
-
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPI's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading and installing them into a virtual environment.
-Twine prints the package links as output, separately for each package.
-
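-A minimal sketch of such a check (assuming the google package was among those uploaded; adjust the
-package name to the packages you actually released):
-
-```shell script
-pip install -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ \
-    "apache-airflow-backport-providers-google==${VERSION_RC}"
-```
-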
-* Upload the package to PyPI's production environment:
-
-```shell script
-twine upload -r pypi dist/*
-```
-
-### Notify developers of release
-
-- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
-the artifacts have been published:
-
-Subject:
-```shell script
-cat <<EOF
-Airflow Backport Providers ${VERSION} are released
-EOF
-```
-
-Body:
-```shell script
-cat <<EOF
-Dear Airflow community,
-
-I'm happy to announce that Airflow Backport Providers packages ${VERSION} were just released.
-
-The source release, as well as the binary releases, are available here:
-
-https://dist.apache.org/repos/dist/release/airflow/backport-providers/
-
-We also made those versions available on PyPI for convenience ('pip install apache-airflow-backport-providers-*'):
-
-https://pypi.org/search/?q=apache-airflow-backport-providers
-
-The documentation and changelogs are available in the PyPI packages:
-
-<PASTE TWINE UPLOAD LINKS HERE. SORT THEM BEFORE!>
-
-
-Cheers,
-<your name>
-EOF
-```
-
-
-### Update Announcements page
-
-Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
-
------------------------------------------------------------------------------------------------------------
-
-## Prepare the Regular Provider Packages Alpha
-
-### Generate release notes
-
-Prepare release notes for all the packages you plan to release. Note that for now the version number is
-hard-coded to 0.0.1 for all packages. Later on we are going to update the versions according
-to SemVer versioning.
-
-Details about maintaining the SEMVER version are going to be discussed and implemented in
-[the related issue](https://github.com/apache/airflow/issues/11425)
-
-
-```shell script
-./breeze prepare-provider-readme [packages]
-```
-
-You can iterate and re-generate the same readme content as many times as you want.
-Generated readme files should be eventually committed to the repository.
-
-### Build regular provider packages for SVN apache upload
-
-The procedure differs slightly between pre-release (alpha/beta) packages and
-release candidates. For the alpha artifacts there is no voting and no signature/checksum check, so
-we do not need to care about this part. For release candidates - those packages might get promoted
-to "final" packages by just renaming the files, so internally they should keep the final version
-number without the rc suffix, even if they are rc1/rc2/... candidates.
-
-They also need to be signed and have checksum files. You can generate the checksum/signature files by running
-the "dev/sign.sh" script (assuming you have the right PGP key set-up for signing). The script
-generates corresponding .asc and .sha512 files for each file to sign.
-
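-Roughly, for each file the script runs the equivalent of (a sketch, not the actual script contents):
-
-```shell script
-gpg --armor --detach-sign --yes "${FILE}"            # produces ${FILE}.asc
-gpg --print-md SHA512 "${FILE}" > "${FILE}.sha512"   # produces ${FILE}.sha512
-```
-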
-#### Build and sign the source and convenience packages
-
-Currently, we are releasing alpha provider packages together with the main sources of Airflow. In the future
-we are going to add a procedure to release the sources of released provider packages separately.
-Details are in [the related issue](https://github.com/apache/airflow/issues/11425)
-
-For alpha/beta releases you need to specify both the SVN and PyPI suffixes, and they have to match. This is
-verified by the breeze script. Note that the script will clean up the dist folder before generating the
-packages, so it will only contain the packages you intended to build.
-
-* Pre-release packages:
-
-```shell script
-export VERSION=0.0.1alpha1
-
-./breeze prepare-provider-packages --version-suffix-for-svn a1 --version-suffix-for-pypi a1
-```
-
-If you only want to build a few packages, run:
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-svn a1 --version-suffix-for-pypi a1 \
-    PACKAGE PACKAGE ....
-```
-
-* Release candidate packages:
-
-```shell script
-export VERSION=0.0.1alpha1
-
-./breeze prepare-provider-packages --version-suffix-for-svn rc1
-```
-
-If you only want to build a few packages, run:
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-svn rc1 PACKAGE PACKAGE ....
-```
-
-* Sign all your packages
-
-```shell script
-pushd dist
-../dev/sign.sh *
-popd
-```
-
-#### Commit the source packages to Apache SVN repo
-
-* Push the artifacts to ASF dev dist repo
-
-```shell script
-# First clone the repo if you do not have it
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# update the repo in case you have it already
-cd airflow-dev
-svn update
-
-# Create a new folder for the release.
-cd providers
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder
-mv ${AIRFLOW_REPO_ROOT}/dist/* ${VERSION}/
-
-# Add and commit
-svn add ${VERSION}/*
-svn commit -m "Add artifacts for Airflow Providers ${VERSION}"
-
-cd ${AIRFLOW_REPO_ROOT}
-```
-
-Verify that the files are available at
-[providers](https://dist.apache.org/repos/dist/dev/airflow/providers/)
-
-### Publish the Regular convenience package to PyPI
-
-
-For pre-release versions you build the same packages for both PyPI and SVN, so you can simply reuse
-the packages generated in the previous step and skip the "prepare" step below.
-
-In order to publish a release candidate to PyPI you just need to build and release the packages.
-The packages should, however, contain the rcN suffix in the version file name but not internally in the package,
-so you need to use the `--version-suffix-for-pypi` switch to prepare those packages.
-Note that these are different packages than the ones used for SVN upload
-though they should be generated from the same sources.
-
-* Generate the packages with the right RC version (specify the version suffix with PyPI switch). Note that
-this will clean up dist folder before generating the packages, so you will only have the right packages there.
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-pypi a1 --version-suffix-for-svn a1
-```
-
-If you only want to build a few packages, run:
-
-```shell script
-./breeze prepare-provider-packages --version-suffix-for-pypi a1 \
-    PACKAGE PACKAGE ....
-```
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPI's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading and installing them into a virtual environment.
-Twine prints the package links as output, separately for each package.
-
-* Upload the package to PyPI's production environment:
-
-```shell script
-twine upload -r pypi dist/*
-```
-
-* Again, confirm that the packages are available under the links printed.
-
-### Notify developers of release
-
-- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
-the artifacts have been published:
-
-Subject:
-```shell script
-cat <<EOF
-Airflow Providers are released
-EOF
-```
-
-Body:
-```shell script
-cat <<EOF
-Dear Airflow community,
-
-I'm happy to announce that a new version of the Airflow Providers packages was just released.
-
-The source release, as well as the binary releases, are available here:
-
-https://dist.apache.org/repos/dist/release/airflow/providers/
-
-We also made those versions available on PyPI for convenience ('pip install apache-airflow-providers-*'):
-
-https://pypi.org/search/?q=apache-airflow-providers
-
-The documentation and changelogs are available in the PyPI packages:
-
-<PASTE TWINE UPLOAD LINKS HERE. SORT THEM BEFORE!>
-
-Cheers,
-<your name>
-EOF
-```
-
-
-### Update Announcements page
-
-Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md
new file mode 100644
index 0000000..2fb1294
--- /dev/null
+++ b/dev/README_RELEASE_AIRFLOW.md
@@ -0,0 +1,733 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<!-- START doctoc generated TOC please keep comment here to allow auto update -->
+<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
+**Table of contents**
+
+- [Prepare the Apache Airflow Package RC](#prepare-the-apache-airflow-package-rc)
+  - [Build RC artifacts](#build-rc-artifacts)
+  - [Prepare PyPI convenience "snapshot" packages](#prepare-pypi-convenience-snapshot-packages)
+  - [\[Optional\] - Manually prepare production Docker Image](#%5Coptional%5C---manually-prepare-production-docker-image)
+  - [Prepare Vote email on the Apache Airflow release candidate](#prepare-vote-email-on-the-apache-airflow-release-candidate)
+- [Verify the release candidate by PMCs](#verify-the-release-candidate-by-pmcs)
+  - [SVN check](#svn-check)
+  - [Licence check](#licence-check)
+  - [Signature check](#signature-check)
+  - [SHA512 sum check](#sha512-sum-check)
+- [Verify release candidates by Contributors](#verify-release-candidates-by-contributors)
+- [Publish the final Apache Airflow release](#publish-the-final-apache-airflow-release)
+  - [Summarize the voting for the Apache Airflow release](#summarize-the-voting-for-the-apache-airflow-release)
+  - [Publish release to SVN](#publish-release-to-svn)
+  - [Prepare PyPI "release" packages](#prepare-pypi-release-packages)
+  - [Update CHANGELOG.md](#update-changelogmd)
+  - [\[Optional\] - Manually prepare production Docker Image](#%5Coptional%5C---manually-prepare-production-docker-image-1)
+  - [Publish documentation](#publish-documentation)
+  - [Notify developers of release](#notify-developers-of-release)
+  - [Update Announcements page](#update-announcements-page)
+
+<!-- END doctoc generated TOC please keep comment here to allow auto update -->
+
+You can find the prerequisites to release Apache Airflow in [README.md](README.md).
+
+# Prepare the Apache Airflow Package RC
+
+## Build RC artifacts
+
+The Release Candidate artifacts we vote upon must be exactly the artifacts we later publish as the official Apache release, with renaming as the only permitted modification – i.e. the contents of the files must be identical between the voted release candidate and the final release. Because of this, the version in the built artifacts that will become the official Apache releases must not include the rcN suffix.
+
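+A quick way to convince yourself that renaming alone preserves the artifact
+(a sketch; the file names are illustrative):
+
+```shell script
+shasum -a 512 apache-airflow-1.10.2rc3-bin.tar.gz
+cp apache-airflow-1.10.2rc3-bin.tar.gz apache-airflow-1.10.2-bin.tar.gz
+shasum -a 512 apache-airflow-1.10.2-bin.tar.gz   # the digest must be identical
+```
+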
+- Set environment variables
+
+    ```shell script
+    # Set Version
+    export VERSION=1.10.2rc3
+
+
+    # Set AIRFLOW_REPO_ROOT to the path of your git repo
+    export AIRFLOW_REPO_ROOT=$(pwd)
+
+
+    # Example after cloning
+    git clone https://github.com/apache/airflow.git airflow
+    cd airflow
+    export AIRFLOW_REPO_ROOT=$(pwd)
+    ```
+
+- Set your version to 1.10.2 in `setup.py` (without the RC tag)
+- Commit the version change.
+
+- Tag your release
+
+    ```shell script
+    git tag -s ${VERSION}
+    ```
+
+- Clean the checkout: the sdist step below would otherwise pick up any untracked or changed files
+
+    ```shell script
+    git clean -fxd
+    ```
+
+- Tarball the repo
+
+    ```shell script
+    git archive --format=tar.gz ${VERSION} --prefix=apache-airflow-${VERSION}/ -o apache-airflow-${VERSION}-source.tar.gz
+    ```
+
+
+- Generate sdist
+
+    NOTE: Make sure your checkout is clean at this stage - any untracked or changed files will otherwise be included
+     in the file produced.
+
+    ```shell script
+    python setup.py compile_assets sdist bdist_wheel
+    ```
+
+- Rename the sdist and wheel
+
+    ```shell script
+    mv dist/apache-airflow-${VERSION%rc?}.tar.gz apache-airflow-${VERSION}-bin.tar.gz
+    mv dist/apache_airflow-${VERSION%rc?}-py2.py3-none-any.whl apache_airflow-${VERSION}-py2.py3-none-any.whl
+    ```
+
+- Generate SHA512/ASC (If you have not generated a key yet, generate it by following instructions on http://www.apache.org/dev/openpgp.html#key-gen-generate-key)
+
+    ```shell script
+    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-source.tar.gz
+    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache-airflow-${VERSION}-bin.tar.gz
+    ${AIRFLOW_REPO_ROOT}/dev/sign.sh apache_airflow-${VERSION}-py2.py3-none-any.whl
+    ```
+
+- Tag & Push latest constraints files. This pushes constraints with rc suffix (this is expected)!
+
+    ```shell script
+    git checkout constraints-1-10
+    git tag -s "constraints-${VERSION}"
+    git push origin "constraints-${VERSION}"
+    ```
+
+- Push the artifacts to ASF dev dist repo
+
+```shell script
+# First clone the repo
+svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
+
+# Create new folder for the release
+cd airflow-dev
+svn mkdir ${VERSION}
+
+# Move the artifacts to svn folder & commit
+mv ${AIRFLOW_REPO_ROOT}/apache{-,_}airflow-${VERSION}* ${VERSION}/
+cd ${VERSION}
+svn add *
+svn commit -m "Add artifacts for Airflow ${VERSION}"
+```
+
+## Prepare PyPI convenience "snapshot" packages
+
+At this point we have the artifact that we vote on, but as a convenience to developers we also want to
+publish "snapshots" of the RC builds to PyPI for installing via pip. Also, those packages
+are used to build the production docker image in DockerHub, so we need to upload the packages
+before we push the tag to GitHub. Pushing the tag to GitHub automatically triggers image building in
+DockerHub.
+
+To do this we need to:
+
+- Build the package:
+
+    ```shell script
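+    # For VERSION=1.10.2rc3 the sed call strips the leading "1.10.2", so the build tag is "rc3"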
+    python setup.py compile_assets egg_info --tag-build "$(sed -e "s/^[0-9.]*//" <<<"$VERSION")" sdist bdist_wheel
+    ```
+
+- Verify the artifacts that would be uploaded:
+
+    ```shell script
+    twine check dist/*
+    ```
+
+- Upload the package to PyPI's test environment:
+
+    ```shell script
+    twine upload -r pypitest dist/*
+    ```
+
+- Verify that the test package looks good by downloading it and installing it into a virtual environment. The package download link is available at:
+https://test.pypi.org/project/apache-airflow/#files
+
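+A minimal sketch of such a check in a throwaway virtualenv (the paths are illustrative):
+
+    ```shell script
+    python -m venv /tmp/airflow-rc-venv
+    source /tmp/airflow-rc-venv/bin/activate
+    pip install -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ \
+        "apache-airflow==${VERSION}"
+    airflow version
+    ```
+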
+- Upload the package to PyPI's production environment:
+`twine upload -r pypi dist/*`
+
+- Again, confirm that the package is available here:
+https://pypi.python.org/pypi/apache-airflow
+
+It is important to stress that this snapshot should not be named "release", and it
+is not supposed to be used by or advertised to end-users who do not read the devlist.
+
+- Push Tag for the release candidate
+
+    This step should only be done now and not before, because it triggers an automated build of
+    the production docker image, using the packages that are currently released in PyPI
+    (both airflow and latest provider packages).
+
+    ```shell script
+    git push origin ${VERSION}
+    ```
+
+## \[Optional\] - Manually prepare production Docker Image
+
+Production Docker images should be automatically built in 2-3 hours after the release tag has been
+pushed. If this did not happen - please login to DockerHub and check the status of builds:
+[Build Timeline](https://hub.docker.com/repository/docker/apache/airflow/timeline)
+
+If needed, you can also build and push the images manually:
+
+Airflow 2+:
+
+```shell script
+export DOCKER_REPO=docker.io/apache/airflow
+for python_version in "3.6" "3.7" "3.8"
+do
+  (
+    export DOCKER_TAG=${VERSION}-python${python_version}
+    ./scripts/ci/images/ci_build_dockerhub.sh
+  )
+done
+```
+
+This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean".
+
+Airflow 1.10:
+
+```shell script
+for python_version in "2.7" "3.5" "3.6" "3.7" "3.8"
+do
+    ./breeze build-image --production-image --python ${python_version} \
+        --image-tag apache/airflow:${VERSION}-python${python_version} --build-cache-local
+    docker push apache/airflow:${VERSION}-python${python_version}
+done
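+# the unsuffixed tag below is an alias for the Python 3.6 image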
+docker tag apache/airflow:${VERSION}-python3.6 apache/airflow:${VERSION}
+docker push apache/airflow:${VERSION}
+```
+
+
+## Prepare Vote email on the Apache Airflow release candidate
+
+- Use the dev/airflow-jira script to generate a list of Airflow JIRAs that were closed in the release.
+
+- Send out a vote to the dev@airflow.apache.org mailing list:
+
+Subject:
+
+```
+[VOTE] Airflow 1.10.2rc3
+```
+
+Body:
+
+```
+Hey all,
+
+I have cut Airflow 1.10.2 RC3. This email is calling a vote on the release,
+which will last for 72 hours. Consider this my (binding) +1.
+
+Airflow 1.10.2 RC3 is available at:
+https://dist.apache.org/repos/dist/dev/airflow/1.10.2rc3/
+
+*apache-airflow-1.10.2rc3-source.tar.gz* is a source release that comes
+with INSTALL instructions.
+*apache-airflow-1.10.2rc3-bin.tar.gz* is the binary Python "sdist" release.
+
+Public keys are available at:
+https://dist.apache.org/repos/dist/release/airflow/KEYS
+
+Only votes from PMC members are binding, but the release manager should encourage members of the community
+to test the release and vote with "(non-binding)".
+
+The test procedure for PMCs and Contributors who would like to test this RC is described in
+https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-apache-airflow-release-candidate
+
+Please note that the version number excludes the `rcX` string, so it's now
+simply 1.10.2. This will allow us to rename the artifact without modifying
+the artifact checksums when we actually release.
+
+
+Changes since 1.10.2rc2:
+*Bugs*:
+[AIRFLOW-3732] Fix issue when trying to edit connection in RBAC UI
+[AIRFLOW-2866] Fix missing CSRF token head when using RBAC UI (#3804)
+...
+
+
+*Improvements*:
+[AIRFLOW-3302] Small CSS fixes (#4140)
+[Airflow-2766] Respect shared datetime across tabs
+...
+
+
+*New features*:
+[AIRFLOW-2874] Enables FAB's theme support (#3719)
+[AIRFLOW-3336] Add new TriggerRule for 0 upstream failures (#4182)
+...
+
+
+*Doc-only Change*:
+[AIRFLOW-XXX] Fix BashOperator Docstring (#4052)
+[AIRFLOW-3018] Fix Minor issues in Documentation
+...
+
+Cheers,
+<your name>
+```
+
+
+# Verify the release candidate by PMCs
+
+The PMCs should verify the releases in order to make sure the release is following the
+[Apache Legal Release Policy](http://www.apache.org/legal/release-policy.html).
+
+At least 3 (+1) votes should be recorded in accordance with
+[Votes on Package Releases](https://www.apache.org/foundation/voting.html#ReleaseVotes)
+
+The legal checks include:
+
+* checking if the packages are present in the right dist folder on svn
+* verifying if all the sources have correct licences
+* verifying if the release manager signed the releases with the right key
+* verifying if all the checksums are valid for the release
+
+## SVN check
+
+The files should be present in the sub-folder of
+[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/)
+
+The following files should be present (9 files):
+
+* -bin.tar.gz + .asc + .sha512
+* -source.tar.gz + .asc + .sha512
+* .whl + .asc + .sha512
+
+As a PMC you should be able to clone the SVN repository:
+
+```shell script
+svn co https://dist.apache.org/repos/dist/dev/airflow
+```
+
+Or update it if you already checked it out:
+
+```shell script
+svn update .
+```
+
+## Licence check
+
+This can be done with the Apache RAT tool.
+
+* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
+  the jar is inside)
+* Unpack the -source.tar.gz to a folder
+* Enter the folder and run the check (point to the place where you extracted the .jar)
+
+```shell script
+java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
+```
+
+## Signature check
+
+Make sure you have imported into your GPG keyring the key of the person who signed the release.
+You can find the valid keys in [KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS).
+
+You can import the whole KEYS file:
+
+```shell script
+gpg --import KEYS
+```
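+
+If you do not have the KEYS file locally yet, you can fetch it first (a sketch using curl):
+
+```shell script
+curl -fsSL https://dist.apache.org/repos/dist/release/airflow/KEYS -o KEYS
+```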
+
+You can also import the keys individually from a keyserver. The below one uses Kaxil's key and
+retrieves it from the default GPG keyserver
+[OpenPGP.org](https://keys.openpgp.org):
+
+```shell script
+gpg --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+```
+
+You should choose to import the key when asked.
+
+Note that, being the default, the OpenPGP keyserver tends to be overloaded and might respond with
+errors or timeouts. Many of the release managers have also uploaded their keys to the
+[GNUPG.net](https://keys.gnupg.net) keyserver, and you can retrieve keys from there:
+
+```shell script
+gpg --keyserver keys.gnupg.net --receive-keys 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+```
+
+Once you have the keys, the signatures can be verified by running this:
+
+```shell script
+for i in *.asc
+do
+   echo "Checking $i"; gpg --verify `basename $i .asc`
+done
+```
+
+This should produce results similar to the below. The "Good signature from ..." line indicates
+that the signature is correct. Do not worry about the "not certified with a trusted signature"
+warning. Most of the certificates used by release managers are self-signed, which is why you get this
+warning. By importing the key from a keyserver in the previous step, or via its ID from the
+[KEYS](https://dist.apache.org/repos/dist/release/airflow/KEYS) page, you already know that
+it is a valid key.
+
+```
+Checking apache-airflow-1.10.12rc4-bin.tar.gz.asc
+gpg: assuming signed data in 'apache-airflow-1.10.12rc4-bin.tar.gz'
+gpg: Signature made sob, 22 sie 2020, 20:28:28 CEST
+gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
+gpg: WARNING: This key is not certified with a trusted signature!
+gpg:          There is no indication that the signature belongs to the owner.
+Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
+Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.asc
+gpg: assuming signed data in 'apache_airflow-1.10.12rc4-py2.py3-none-any.whl'
+gpg: Signature made sob, 22 sie 2020, 20:28:31 CEST
+gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
+gpg: WARNING: This key is not certified with a trusted signature!
+gpg:          There is no indication that the signature belongs to the owner.
+Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
+Checking apache-airflow-1.10.12rc4-source.tar.gz.asc
+gpg: assuming signed data in 'apache-airflow-1.10.12rc4-source.tar.gz'
+gpg: Signature made sob, 22 sie 2020, 20:28:25 CEST
+gpg:                using RSA key 12717556040EEF2EEAF1B9C275FCCD0A25FA0E4B
+gpg: Good signature from "Kaxil Naik <ka...@gmail.com>" [unknown]
+gpg: WARNING: This key is not certified with a trusted signature!
+gpg:          There is no indication that the signature belongs to the owner.
+Primary key fingerprint: 1271 7556 040E EF2E EAF1  B9C2 75FC CD0A 25FA 0E4B
+```
+
+## SHA512 sum check
+
+Run this:
+
+```shell script
+for i in *.sha512
+do
+    echo "Checking $i"; shasum -a 512 `basename $i .sha512 ` | diff - $i
+done
+```
+
+You should get output similar to:
+
+```
+Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
+Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
+Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
+```
+
+# Verify release candidates by Contributors
+
+This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
+actual users of Apache Airflow test the release candidates in their own staging/test installations.
+Each release candidate is available on PyPI in addition to the SVN packages, so everyone should be
+able to install the release candidate version of Airflow simply via pip (<VERSION> is 1.10.12 for
+example, and <X> is the release candidate number 1, 2, 3, ...):
+
+```shell script
+pip install apache-airflow==<VERSION>rc<X>
+```
+
+Optionally, you can install it with constraints:
+
+```shell script
+pip install apache-airflow==<VERSION>rc<X> \
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-<VERSION>/constraints-3.6.txt"
+```
+
+Note that the constraints file is specific to the Python version you are installing Airflow with.
+
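+For example, installing 1.10.12rc4 on Python 3.7 (a sketch; substitute your versions):
+
+```shell script
+pip install apache-airflow==1.10.12rc4 \
+  --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-1.10.12/constraints-3.7.txt"
+```
+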
+You can use any of the installation methods you prefer (you can even install it via the binary wheel
+downloaded from the SVN).
+
+There is also an easy way of installation with Breeze if you have the latest sources of Apache Airflow.
+Running the following command will use tmux inside Breeze, create an `admin` user, and run the Webserver & Scheduler:
+
+```shell script
+./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres
+```
+
+For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
+
+```shell script
+./breeze start-airflow --install-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres --no-rbac-ui
+```
+
+Once you install and run Airflow, you should perform any verification you see as necessary to check
+that Airflow works as you expect.
+
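+For example, a few quick smoke checks (a sketch; pick whatever checks matter for your use case):
+
+```shell script
+airflow version
+airflow initdb       # "airflow db init" on Airflow 2+
+airflow list_dags    # "airflow dags list" on Airflow 2+
+```
+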
+# Publish the final Apache Airflow release
+
+## Summarize the voting for the Apache Airflow release
+
+Once the vote has been passed, you will need to send the result of the vote to dev@airflow.apache.org:
+
+Subject:
+
+```
+[RESULT][VOTE] Airflow 1.10.2rc3
+```
+
+Message:
+
+```
+Hello,
+
+Apache Airflow 1.10.2 (based on RC3) has been accepted.
+
+4 "+1" binding votes received:
+- Kaxil Naik  (binding)
+- Bolke de Bruin (binding)
+- Ash Berlin-Taylor (binding)
+- Tao Feng (binding)
+
+
+4 "+1" non-binding votes received:
+
+- Deng Xiaodong (non-binding)
+- Stefan Seelmann (non-binding)
+- Joshua Patchus (non-binding)
+- Felix Uellendall (non-binding)
+
+Vote thread:
+https://lists.apache.org/thread.html/736404ca3d2b2143b296d0910630b9bd0f8b56a0c54e3a05f4c8b5fe@%3Cdev.airflow.apache.org%3E
+
+I'll continue with the release process, and the release announcement will follow shortly.
+
+Cheers,
+<your name>
+```
+
+
+## Publish release to SVN
+
+You need to migrate the RC artifacts that passed to this repository:
+https://dist.apache.org/repos/dist/release/airflow/
+(The migration should include renaming the files so that they no longer have the RC number in their filenames.)
+
+The best way of doing this is to svn cp between the two repos (this avoids having to upload the binaries again, and gives a clearer history in the svn commit logs):
+
+```shell script
+# First clone the repo
+export RC=1.10.4rc5
+export VERSION=${RC/rc?/}
+svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
+
+# Create new folder for the release
+cd airflow-release
+svn mkdir ${VERSION}
+cd ${VERSION}
+
+# Move the artifacts to svn folder & commit
+for f in ../../airflow-dev/$RC/*; do
+  base=$(basename "$f")
+  svn cp "$f" "${base/rc?/}"
+done
+svn commit -m "Release Airflow ${VERSION} from ${RC}"
+
+# Remove old release
+# http://www.apache.org/legal/release-policy.html#when-to-archive
+cd ..
+export PREVIOUS_VERSION=1.10.1
+svn rm ${PREVIOUS_VERSION}
+svn commit -m "Remove old release: ${PREVIOUS_VERSION}"
+```
+
+Verify that the packages appear in [airflow](https://dist.apache.org/repos/dist/release/airflow/)
+
+## Prepare PyPI "release" packages
+
+At this point we release an official package:
+
+- Build the package:
+
+    ```shell script
+    python setup.py compile_assets sdist bdist_wheel
+    ```
+
+- Verify the artifacts that would be uploaded:
+
+    ```shell script
+    twine check dist/*
+    ```
+
+- Upload the package to PyPI's test environment:
+
+    ```shell script
+    twine upload -r pypitest dist/*
+    ```
+
+- Verify that the test package looks good by downloading it and installing it into a virtual environment.
+    The package download link is available at: https://test.pypi.org/project/apache-airflow/#files
+
+- Upload the package to PyPI's production environment:
+
+    ```shell script
+    twine upload -r pypi dist/*
+    ```
+
+- Again, confirm that the package is available here: https://pypi.python.org/pypi/apache-airflow
+
+## Update CHANGELOG.md
+
+- Get a diff between the last version and the current version:
+
+    ```shell script
+    git log 1.8.0..1.9.0 --pretty=oneline
+    ```
+
+- Update CHANGELOG.md with the details, and commit it.
+
+- Re-Tag & Push the constraints files with the final release version.
+
+    ```shell script
+    git checkout constraints-${RC}
+    git tag -s "constraints-${VERSION}"
+    git push origin "constraints-${VERSION}"
+    ```
+
+- Push Tag for the final version
+
+    This step should only be done now and not before, because it triggers an automated build of
+    the production docker image, using the packages that are currently released in PyPI
+    (both airflow and latest provider packages).
+
+    ```shell script
+    git push origin ${VERSION}
+    ```
+
+## \[Optional\] - Manually prepare production Docker Image
+
+Production Docker images should be automatically built in 2-3 hours after the release tag has been
+pushed. If this did not happen - please login to DockerHub and check the status of builds:
+[Build Timeline](https://hub.docker.com/repository/docker/apache/airflow/timeline)
+
+If needed, you can also build and push the images manually:
+
+Airflow 2+:
+
+```shell script
+export DOCKER_REPO=docker.io/apache/airflow
+for python_version in "3.6" "3.7" "3.8"
+do
+  (
+    export DOCKER_TAG=${VERSION}-python${python_version}
+    ./scripts/ci/images/ci_build_dockerhub.sh
+  )
+done
+```
+
+This will wipe Breeze cache and docker-context-files in order to make sure the build is "clean".
+
+
+Airflow 1.10:
+
+```shell script
+for python_version in "2.7" "3.5" "3.6" "3.7" "3.8"
+do
+    ./breeze build-image --production-image --python ${python_version} \
+        --image-tag apache/airflow:${VERSION}-python${python_version} --build-cache-local
+    docker push apache/airflow:${VERSION}-python${python_version}
+done
+docker tag apache/airflow:${VERSION}-python3.6 apache/airflow:${VERSION}
+docker push apache/airflow:${VERSION}
+```
+
+## Publish documentation
+
+Documentation is an essential part of the product and should be made available to users.
+In our case, documentation for the released versions is published in a separate repository - [`apache/airflow-site`](https://github.com/apache/airflow-site) - but the documentation source code and build tools are available in the `apache/airflow` repository, so you have to coordinate between the two repositories to be able to build the documentation.
+
+Documentation for the Apache Airflow package can be found in the ``/docs/apache-airflow`` directory.
+
+- First, clone the airflow-site repository and set the environment variable ``AIRFLOW_SITE_DIRECTORY``.
+
+    ```shell script
+    git clone https://github.com/apache/airflow-site.git airflow-site
+    cd airflow-site
+    export AIRFLOW_SITE_DIRECTORY="$(pwd)"
+    ```
+
+- Then go back to your Airflow repository and build the necessary documentation packages:
+
+    ```shell script
+    cd "${AIRFLOW_REPO_ROOT}"
+    ./breeze build-docs -- --package-filter apache-airflow --for-production
+    ```
+
+- Now you can preview the documentation.
+
+    ```shell script
+    ./docs/start_doc_server.sh
+    ```
+
+- Copy the documentation to the ``airflow-site`` repository, create a commit and push the changes.
+
+    ```shell script
+    ./docs/publish_docs.py --package apache-airflow
+    cd "${AIRFLOW_SITE_DIRECTORY}"
+    git commit -m "Add documentation for Apache Airflow ${VERSION}"
+    git push
+    ```
+
+## Notify developers of release
+
+- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that
+the artifacts have been published:
+
+Subject:
+
+```shell script
+cat <<EOF
+Airflow ${VERSION} is released
+EOF
+```
+
+Body:
+
+```shell script
+cat <<EOF
+Dear Airflow community,
+
+I'm happy to announce that Airflow ${VERSION} was just released.
+
+The source release, as well as the binary "sdist" release, are available
+here:
+
+https://dist.apache.org/repos/dist/release/airflow/${VERSION}/
+
+We also made this version available on PyPI for convenience (`pip install apache-airflow`):
+
+https://pypi.python.org/pypi/apache-airflow
+
+The documentation is available on:
+https://airflow.apache.org/
+https://airflow.apache.org/docs/apache-airflow/${VERSION}/
+
+Find the CHANGELOG here for more details:
+
+https://airflow.apache.org/changelog.html#airflow-1-10-2-2019-01-19
+
+Cheers,
+<your name>
+EOF
+```
+
+## Update Announcements page
+
+Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/)
diff --git a/docs/production-deployment.rst b/docs/production-deployment.rst
index 335e713..b0ffa02 100644
--- a/docs/production-deployment.rst
+++ b/docs/production-deployment.rst
@@ -51,7 +51,7 @@ does not contain ``build-essential``. If you need compiler like gcc or g++ or ma
 are not found in the image and it is recommended that you follow the "customize" route instead.
 
 How to extend the image - it is something you are most likely familiar with - simply
-build a new image using Dockerfile's ``FROM:`` directive and add whatever you need. Then you can add your
+build a new image using Dockerfile's ``FROM`` directive and add whatever you need. Then you can add your
 Debian dependencies with ``apt`` or PyPI dependencies with ``pip install`` or any other stuff you need.
 
 You should be aware of a few things:
@@ -64,7 +64,7 @@ You should be aware, about a few things:
 
 .. code-block:: dockerfile
 
-  FROM: apache/airflow:1.10.14
+  FROM apache/airflow:1.10.14
   USER root
   RUN apt-get update \
     && apt-get install -y --no-install-recommends \
@@ -81,7 +81,7 @@ You should be aware, about a few things:
 
 .. code-block:: dockerfile
 
-  FROM: apache/airflow:1.10.14
+  FROM apache/airflow:1.10.14
   RUN pip install --no-cache-dir --user my-awesome-pip-dependency-to-add
 
 
@@ -92,7 +92,7 @@ You should be aware, about a few things:
 
 .. code-block:: dockerfile
 
-  FROM: apache/airflow:1.10.14
+  FROM apache/airflow:1.10.14
   USER root
   RUN apt-get update \
     && apt-get install -y --no-install-recommends \
@@ -116,7 +116,7 @@ suited to prepare optimized production images.
 The advantage of this method is that it produces optimized image even if you need some compile-time
 dependencies that are not needed in the final image. You need to use Airflow Sources to build such images
 from the `official distribution folder of Apache Airflow <https://downloads.apache.org/airflow/>`_ for the
-released versions, or checked out from the Github project if you happen to do it from git sources.
+released versions, or checked out from the GitHub project if you happen to do it from git sources.
 
 The easiest way to build the image is to use the ``breeze`` script, but you can also build such a customized
 image by running appropriately crafted docker build in which you specify all the ``build-args``
@@ -133,16 +133,16 @@ additional apt dev and runtime dependencies.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
-    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
-    --build-arg ADDITIONAL_PYTHON_DEPS="pandas"
-    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++"
-    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless"
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
+    --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
+    --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless" \
     --tag my-image
 
 
@@ -166,7 +166,7 @@ based on example in `this comment <https://github.com/apache/airflow/issues/8605
   docker build . -f Dockerfile \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
@@ -236,7 +236,7 @@ Building the image (after copying the files downloaded to the "docker-context-fi
 
   ./breeze build-image \
       --production-image --python 3.7 --install-airflow-version=1.10.14 \
-      --disable-mysql-client-installation --disable-pip-cache --add-local-pip-wheels \
+      --disable-mysql-client-installation --disable-pip-cache --install-from-local-files-when-building \
       --constraints-location="/docker-context-files/constraints-1-10.txt"
 
 or
@@ -246,7 +246,7 @@ or
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
@@ -254,7 +254,7 @@ or
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
     --build-arg INSTALL_MYSQL_CLIENT="false" \
     --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \
-    --build-arg AIRFLOW_LOCAL_PIP_WHEELS="true" \
+    --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \
     --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-1-10.txt"
 
 
@@ -263,7 +263,7 @@ Customizing & extending the image together
 
 You can combine both - customizing & extending the image. You can build the image first using
 ``customize`` method (either with docker command or with ``breeze`` and then you can ``extend``
-the resulting image using ``FROM:`` any dependencies you want.
+the resulting image using ``FROM``, adding any dependencies you want.
 
 Customizing PYPI installation
 .............................
@@ -389,102 +389,116 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | Build argument                           | Default value                            | Description                              |
 +==========================================+==========================================+==========================================+
-| ``PYTHON_BASE_IMAGE``                    | ``python:3.6-slim-buster``               | Base python image                        |
+| ``PYTHON_BASE_IMAGE``                    | ``python:3.6-slim-buster``               | Base python image.                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``PYTHON_MAJOR_MINOR_VERSION``           | ``3.6``                                  | major/minor version of Python (should    |
-|                                          |                                          | match base image)                        |
+|                                          |                                          | match base image).                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_VERSION``                      | ``2.0.0.dev0``                           | version of Airflow                       |
+| ``AIRFLOW_VERSION``                      | ``2.0.0.dev0``                           | version of Airflow.                      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_REPO``                         | ``apache/airflow``                       | the repository from which PIP            |
-|                                          |                                          | dependencies are pre-installed           |
+|                                          |                                          | dependencies are pre-installed.          |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_BRANCH``                       | ``master``                               | the branch from which PIP dependencies   |
-|                                          |                                          | are pre-installed initially              |
+|                                          |                                          | are pre-installed initially.             |
++------------------------------------------+------------------------------------------+------------------------------------------+
+| ``AIRFLOW_CONSTRAINTS_LOCATION``         |                                          | If not empty, it will override the       |
+|                                          |                                          | source of the constraints with the       |
+|                                          |                                          | specified URL or file. Note that the     |
+|                                          |                                          | file has to be in docker context so      |
+|                                          |                                          | it's best to place such file in          |
+|                                          |                                          | one of the folders included in           |
+|                                          |                                          | .dockerignore.                           |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | reference (branch or tag) from GitHub    |
-|                                          |                                          | repository from which constraints are    |
-|                                          |                                          | used. By default it is set to            |
-|                                          |                                          | ``constraints-master`` but can be        |
-|                                          |                                          | ``constraints-1-10`` for 1.10.* versions |
-|                                          |                                          | or it could point to specific version    |
-|                                          |                                          | for example ``constraints-1.10.14``      |
+| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | Reference (branch or tag) from GitHub    |
+|                                          | where the constraints file is taken.     |
+|                                          |                                          | It can be ``constraints-master`` but     |
+|                                          |                                          | also can be ``constraints-1-10`` for     |
+|                                          |                                          | 1.10.* installation. In case of building |
+|                                          |                                          | specific version you want to point it    |
+|                                          |                                          | to specific tag, for example             |
+|                                          |                                          | ``constraints-1.10.14``.                 |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                         | Default extras with which airflow is     |
-|                                          |                                          | installed                                |
+|                                          |                                          | installed.                               |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_AIRFLOW_VIA_PIP``              | ``false``                                | If set to true, Airflow is installed via |
-|                                          |                                          | pip install. if you want to install      |
-|                                          |                                          | Airflow from externally provided binary  |
-|                                          |                                          | package you can set it to false, place   |
-|                                          |                                          | the package in ``docker-context-files``  |
-|                                          |                                          | and set ``AIRFLOW_LOCAL_PIP_WHEELS`` to  |
-|                                          |                                          | true. You have to also set to true the   |
+| ``INSTALL_FROM_PYPI``                    | ``true``                                 | If set to true, Airflow is installed     |
+|                                          | from PyPI. If you want to install        |
+|                                          | Airflow from a self-built package        |
+|                                          |                                          | you can set it to false, put package in  |
+|                                          |                                          | ``docker-context-files`` and set         |
+|                                          |                                          | ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` to |
+|                                          |                                          | ``true``. For this you have to also keep |
 |                                          |                                          | ``AIRFLOW_PRE_CACHED_PIP_PACKAGES`` flag |
+|                                          |                                          | set to ``false``.                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``true``                                 | Allows to pre-cache airflow PIP packages |
+| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``false``                                | Allows to pre-cache airflow PIP packages |
 |                                          |                                          | from the GitHub of Apache Airflow        |
 |                                          |                                          | This allows to optimize iterations for   |
-|                                          |                                          | Image builds and speeds up CI builds     |
-|                                          |                                          | But in some corporate environments it    |
-|                                          |                                          | might be forbidden to download anything  |
-|                                          |                                          | from public repositories.                |
+|                                          |                                          | Image builds and speeds up CI builds.    |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_LOCAL_PIP_WHEELS``             | ``false``                                | If set to true, Airflow and it's         |
-|                                          |                                          | dependencies are installed during build  |
-|                                          |                                          | from locally downloaded .whl             |
-|                                          |                                          | files placed in the                      |
-|                                          |                                          | ``docker-context-files``.                |
+| ``INSTALL_FROM_DOCKER_CONTEXT_FILES``    | ``false``                                | If set to true, Airflow, providers and   |
+|                                          | all dependencies are installed           |
+|                                          |                                          | from locally built/downloaded            |
+|                                          |                                          | .whl and .tar.gz files placed in the     |
+|                                          |                                          | ``docker-context-files``. In certain     |
+|                                          |                                          | corporate environments, this is required |
+|                                          |                                          | to install airflow from such pre-vetted  |
+|                                          |                                          | packages rather than from PyPI. For this |
+|                                          |                                          | to work, also set ``INSTALL_FROM_PYPI``  |
+|                                          |                                          | to false.                                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_AIRFLOW_EXTRAS``            |                                          | Optional additional extras with which    |
-|                                          |                                          | airflow is installed                     |
+|                                          |                                          | airflow is installed.                    |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_PYTHON_DEPS``               |                                          | Optional python packages to extend       |
-|                                          |                                          | the image with some extra dependencies   |
+|                                          |                                          | the image with some extra dependencies.  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``DEV_APT_COMMAND``                      | (see Dockerfile)                         | Dev apt command executed before dev deps |
-|                                          |                                          | are installed in the Build image         |
+|                                          |                                          | are installed in the Build image.        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_APT_COMMAND``           |                                          | Additional Dev apt command executed      |
 |                                          |                                          | before dev deps are installed            |
-|                                          |                                          | in the Build image. Should start with && |
+|                                          |                                          | in the Build image. Should start with    |
+|                                          |                                          | ``&&``.                                  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``DEV_APT_DEPS``                         | (see Dockerfile)                         | Dev APT dependencies installed           |
-|                                          |                                          | in the Build image                       |
+|                                          |                                          | in the Build image.                      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_APT_DEPS``              |                                          | Additional apt dev dependencies          |
-|                                          |                                          | installed in the Build image             |
+|                                          |                                          | installed in the Build image.            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_DEV_APT_ENV``               |                                          | Additional env variables defined         |
-|                                          |                                          | when installing dev deps                 |
+|                                          |                                          | when installing dev deps.                |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``RUNTIME_APT_COMMAND``                  | (see Dockerfile)                         | Runtime apt command executed before deps |
-|                                          |                                          | are installed in the Main image          |
+|                                          |                                          | are installed in the Main image.         |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_RUNTIME_APT_COMMAND``       |                                          | Additional Runtime apt command executed  |
 |                                          |                                          | before runtime deps are installed        |
-|                                          |                                          | in the Main image. Should start with &&  |
+|                                          |                                          | in the Main image. Should start with     |
+|                                          |                                          | ``&&``.                                  |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``RUNTIME_APT_DEPS``                     | (see Dockerfile)                         | Runtime APT dependencies installed       |
-|                                          |                                          | in the Main image                        |
+|                                          |                                          | in the Main image.                       |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_RUNTIME_APT_DEPS``          |                                          | Additional apt runtime dependencies      |
-|                                          |                                          | installed in the Main image              |
+|                                          |                                          | installed in the Main image.             |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``ADDITIONAL_RUNTIME_APT_ENV``           |                                          | Additional env variables defined         |
-|                                          |                                          | when installing runtime deps             |
+|                                          |                                          | when installing runtime deps.            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_HOME``                         | ``/opt/airflow``                         | Airflow’s HOME (that’s where logs and    |
-|                                          |                                          | sqlite databases are stored)             |
+|                                          |                                          | sqlite databases are stored).            |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_UID``                          | ``50000``                                | Airflow user UID                         |
+| ``AIRFLOW_UID``                          | ``50000``                                | Airflow user UID.                        |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``AIRFLOW_GID``                          | ``50000``                                | Airflow group GID. Note that most files  |
 |                                          |                                          | created on behalf of airflow user belong |
 |                                          |                                          | to the ``root`` group (0) to keep        |
-|                                          |                                          | OpenShift Guidelines compatibility       |
+|                                          |                                          | OpenShift Guidelines compatibility.      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                        | Home directory of the Airflow user       |
+| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                        | Home directory of the Airflow user.      |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``CASS_DRIVER_BUILD_CONCURRENCY``        | ``8``                                    | Number of processors to use for          |
 |                                          |                                          | cassandra PIP install (speeds up         |
@@ -493,7 +507,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u
 +------------------------------------------+------------------------------------------+------------------------------------------+
 | ``INSTALL_MYSQL_CLIENT``                 | ``true``                                 | Whether MySQL client should be installed |
 |                                          |                                          | The mysql extra is removed from extras   |
-|                                          |                                          | if the client is not installed           |
+|                                          |                                          | if the client is not installed.          |
 +------------------------------------------+------------------------------------------+------------------------------------------+
 
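+For example, a minimal sketch of building the image from packages placed in
+``docker-context-files`` rather than from PyPI (assuming you have downloaded
+or built the required .whl files there first):
+
+.. code-block:: bash
+
+  docker build . \
+    --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \
+    --build-arg PYTHON_MAJOR_MINOR_VERSION=3.6 \
+    --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \
+    --build-arg INSTALL_FROM_PYPI="false" \
+    --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+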
 There are build arguments that determine the installation mechanism of Apache Airflow for the
@@ -503,59 +517,33 @@ production image. There are three types of build:
 * You can build the image from released PyPI airflow package (used to build the official Docker image)
 * You can build the image from any version in the GitHub repository (this is used mostly for system testing).
 
-+-----------------------------------+-----------------------------------+
-| Build argument                    | What to specify                   |
-+===================================+===================================+
-| ``AIRFLOW_INSTALL_SOURCES``       | Should point to the sources of    |
-|                                   | of Apache Airflow. It can be      |
-|                                   | either "." for installation from  |
-|                                   | local sources, "apache-airflow"   |
-|                                   | for installation from packages    |
-|                                   | and URL to installation from      |
-|                                   | GitHub repository (see below)     |
-|                                   | to install from any GitHub        |
-|                                   | version                           |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_INSTALL_VERSION``       | Optional - might be used for      |
-|                                   | package installation case to      |
-|                                   | set Airflow version for example   |
-|                                   | "==1.10.14". Remember to also     |
-|                                   | Set ``AIRFLOW_VERSION``           |
-|                                   | when you use it.                  |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | reference (branch or tag) from    |
-|                                   | GitHub where constraints file     |
-|                                   | is taken from. By default it is   |
-|                                   | ``constraints-master`` but can be |
-|                                   | ``constraints-1-10`` for 1.10.*   |
-|                                   | constraint or if you want to      |
-|                                   | point to specific version         |
-|                                   | might be ``constraints-1.10.14``  |
-+-----------------------------------+-----------------------------------+
-| ``SLUGIFY_USES_TEXT_UNIDECODE``   | In case of of installing airflow  |
-|                                   | 1.10.2 or 1.10.1 you need to      |
-|                                   | set this arg to ``yes``.          |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_WWW``                   | In case of Airflow 2.0 it should  |
-|                                   | be "www", in case of Airflow 1.10 |
-|                                   | series it should be "www_rbac".   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_SOURCES_FROM``          | Sources of Airflow. Set it to     |
-|                                   | "empty" to avoid costly           |
-|                                   | Docker context copying            |
-|                                   | in case of installation from      |
-|                                   | the package or from GitHub URL.   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
-| ``AIRFLOW_SOURCES_TO``            | Target for Airflow sources. Set   |
-|                                   | to "/empty" to avoid costly       |
-|                                   | Docker context copying            |
-|                                   | in case of installation from      |
-|                                   | the package or from GitHub URL.   |
-|                                   | See examples below                |
-+-----------------------------------+-----------------------------------+
-
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| Build argument                    | Default                | What to specify                                                                   |
++===================================+========================+===================================================================================+
+| ``AIRFLOW_INSTALLATION_METHOD``   | ``apache-airflow``     | Should point to the installation method of Apache Airflow. It can be              |
+|                                   |                        | ``apache-airflow`` for installation from packages, a URL for installation from a  |
+|                                   |                        | GitHub repository tag or branch, or "." to install from local sources.            |
+|                                   |                        | Note that installing from local sources requires appropriate values of the        |
+|                                   |                        | ``AIRFLOW_SOURCES_FROM`` and ``AIRFLOW_SOURCES_TO`` variables as described below. |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_INSTALL_VERSION``       |                        | Optional - might be used for package installation of a different Airflow version, |
+|                                   |                        | for example "==1.10.14". For consistency, you should also set ``AIRFLOW_VERSION`` |
+|                                   |                        | to the same value; AIRFLOW_VERSION is embedded as a label in the image created.   |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | ``constraints-master`` | Reference (branch or tag) from GitHub where constraints file is taken from.       |
+|                                   |                        | It can be ``constraints-master`` but can also be ``constraints-1-10`` for         |
+|                                   |                        | 1.10.* installations. When building a specific version,                           |
+|                                   |                        | point it to the specific tag, for example ``constraints-1.10.14``.                |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_WWW``                   | ``www``                | In case of Airflow 2.0 it should be "www", in case of Airflow 1.10                |
+|                                   |                        | series it should be "www_rbac".                                                   |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_SOURCES_FROM``          | ``empty``              | Sources of Airflow. Set it to "." when you install airflow from                   |
+|                                   |                        | local sources.                                                                    |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
+| ``AIRFLOW_SOURCES_TO``            | ``/empty``             | Target for Airflow sources. Set to "/opt/airflow" when                            |
+|                                   |                        | you want to install airflow from local sources.                                   |
++-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
 
 This builds the production image for Python 3.6 with default extras from the local sources (currently
 the master version of 2.0):
@@ -572,7 +560,7 @@ constraints taken from constraints-1-10-12 branch in GitHub.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="https://github.com/apache/airflow/archive/1.10.14.tar.gz#egg=apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/1.10.14.tar.gz#egg=apache-airflow" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
@@ -587,7 +575,7 @@ of v1-10-test branch.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
@@ -603,14 +591,14 @@ additional python dependencies and pre-installed pip dependencies from 1.10.14 t
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_BRANCH="v1-10-test" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1.10.14" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
-    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs"
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \
     --build-arg ADDITIONAL_PYTHON_DEPS="sshtunnel oauth2client"
 
 This builds the production image for Python 3.7 with additional airflow extras from 1.10.14 PyPI package and
@@ -621,14 +609,14 @@ additional apt dev and runtime dependencies.
   docker build . \
     --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
     --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-    --build-arg AIRFLOW_INSTALL_SOURCES="apache-airflow" \
+    --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="1.10.14" \
     --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
     --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-1-10" \
     --build-arg AIRFLOW_SOURCES_FROM="empty" \
     --build-arg AIRFLOW_SOURCES_TO="/empty" \
-    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc"
-    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++"
+    --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
+    --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
     --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless"
 
 
diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh
index a0ad0e6..e5e230b 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/images/ci_build_dockerhub.sh
@@ -49,46 +49,83 @@ echo "DOCKER_TAG=${DOCKER_TAG}"
 echo "Detected PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION}"
 echo
 
-(
-    export INSTALL_FROM_PYPI="true"
-    export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
-    export INSTALL_PROVIDERS_FROM_SOURCES="true"
-    export AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
-    export DOCKER_CACHE="pulled"
-    # shellcheck source=scripts/ci/libraries/_script_init.sh
-    . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
-
+if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
     echo
-    echo "Building and pushing CI image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
+    echo "Building airflow from branch or non-release tag: ${DOCKER_TAG}"
     echo
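+    # For example (hypothetical tags): "master-python3.6" or "v1-10-test-python3.7"
+    # land here, while release tags such as "1.10.14-python3.6" start with a digit
+    # and are handled in the else branch below.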
-    rm -rf "${BUILD_CACHE_DIR}"
-    build_images::prepare_ci_build
-    build_images::rebuild_ci_image_if_needed
-    if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
-        # Do not push if we are building a tagged version
-        push_pull_remove_images::push_ci_images
-    fi
-)
+    # Only build and push CI image for the nightly-master, v1-10-test and v2-0-test branches
+    # for tagged releases we build everything from PyPI, so we do not need CI images
+    # For development images, we have to build all packages from current sources because we want to produce
+    # `Latest and greatest` image from those branches. We need to build and push CI image as well as PROD
+    # image but we need to build CI image first, in order to use it to prepare provider packages
+    # The CI image provides an environment where we can reproducibly download the right .whl packages
+    # and build the provider packages and then build the production image using those .whl packages
+    # prepared. This is as close as it can get to production images - everything is built from
+    # packages, but not from PyPI - those packages are built locally using the latest sources!
 
-(
-    export INSTALL_FROM_PYPI="false"
-    export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+    # Note - we need sub-processes here, because we can run _script_init.sh only once per process
+    # and it determines how to build the image - since we are building two images here
+    # we need to run those in sub-processes
+    (
+        export INSTALL_FROM_PYPI="true"
+        export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
+        export INSTALL_PROVIDERS_FROM_SOURCES="true"
+        export AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
+        export DOCKER_CACHE="pulled"
+        # shellcheck source=scripts/ci/libraries/_script_init.sh
+        . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+        echo
+        echo "Building and pushing CI image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
+        echo
+        rm -rf "${BUILD_CACHE_DIR}"
+        rm -rf "${AIRFLOW_SOURCES}/docker-context-files/"*
+        build_images::prepare_ci_build
+        build_images::rebuild_ci_image_if_needed
+        push_pull_remove_images::push_ci_images
+    )
+    (
+        export INSTALL_FROM_PYPI="false"
+        export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+        export INSTALL_PROVIDERS_FROM_SOURCES="false"
+        export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
+        export DOCKER_CACHE="pulled"
+        # shellcheck source=scripts/ci/libraries/_script_init.sh
+        . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+        echo
+        echo "Building and pushing PROD image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
+        echo
+        rm -rf "${BUILD_CACHE_DIR}"
+        rm -rf "${AIRFLOW_SOURCES}/docker-context-files/"*
+        build_images::prepare_prod_build
+        build_images::build_prod_images_from_packages
+        push_pull_remove_images::push_prod_images
+    )
+else
+    echo
+    echo "Building airflow from release tag: ${DOCKER_TAG}"
+    echo
+    # This is an image built from the "release" tag (either RC or final one).
+    # In this case all packages are taken from PyPI rather than from locally built sources
+    export INSTALL_FROM_PYPI="true"
+    export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
     export INSTALL_PROVIDERS_FROM_SOURCES="false"
     export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
-    export DOCKER_CACHE="pulled"
+    export DOCKER_CACHE="local"
+    # Name the image based on the TAG rather than based on the branch name
+    export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
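+    # Strip the python suffix to derive the airflow version from the tag,
+    # e.g. a (hypothetical) release tag "1.10.14-python3.6" yields "1.10.14"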
+    export INSTALL_AIRFLOW_VERSION="${DOCKER_TAG%-python*}"
+    export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}"
+    export AIRFLOW_SOURCES_FROM="empty"
+    export AIRFLOW_SOURCES_TO="/empty"
 
-    if [[ "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
-        # Disable cache and set name of the tag as image name if we are building a tagged version
-        export DOCKER_CACHE="disabled"
-        export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
-    fi
     # shellcheck source=scripts/ci/libraries/_script_init.sh
     . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
     echo
     echo "Building and pushing PROD image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
     echo
     rm -rf "${BUILD_CACHE_DIR}"
+    rm -rf "${AIRFLOW_SOURCES}/docker-context-files/"*
     build_images::prepare_prod_build
-    build_images::build_prod_images_from_packages
+    build_images::build_prod_images
     push_pull_remove_images::push_prod_images
-)
+fi
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 8f48c16..296124f 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -30,34 +30,53 @@ function build_images::add_build_args_for_remote_install() {
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
             "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE}"
         )
-    fi
-    if [[ "${AIRFLOW_CONSTRAINTS_LOCATION}" != "" ]]; then
-        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}"
-        )
-    fi
-    if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
-        # All types of references/versions match this regexp for 1.10 series
-        # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} matches last
-        # minor digit of version and it's length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+
-        AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]}
-        if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then
-            # For v1_10_* branches use constraints-1-10 branch
+    else
+        if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then
+            # All types of references/versions match this regexp for 1.10 series
+            # for example v1_10_test, 1.10.10, 1.10.9 etc. ${BASH_REMATCH[1]} matches last
+            # minor digit of version and its length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+
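+            # e.g. "v1_10_test" maps to constraints-1-10 below, while "1.10.14" maps to constraints-1.10.14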
+            AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]}
+            if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then
+                # For v1_10_* branches use constraints-1-10 branch
+                EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
+                    "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10"
+                )
+            else
+                EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
+                    # For specified minor version of 1.10 or v1 branch use specific reference constraints
+                    "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
+                )
+            fi
+        elif [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then
             EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10"
+                # For specified minor version of 2.0 or v2 branch use specific reference constraints
+                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
             )
         else
+            # For all other we just get the default constraint branch coming from the _initialization.sh
             EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                # For specified minor version of 1.10 use specific reference constraints
-                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}"
+                "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}"
             )
         fi
-        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-test"
-    else
-        # For all other (master, 2.0+) we just get the default constraint branch
+    fi
+    if [[ "${AIRFLOW_CONSTRAINTS_LOCATION}" != "" ]]; then
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}"
+            "--build-arg" "AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}"
         )
+    fi
+    # Depending on the version built, we choose the right branch for preloading the packages from
+    # If we run build for v1-10-test builds we should choose v1-10-test, for v2-0-test we choose v2-0-test
+    # all other builds where you choose a specific version (1.10 or 2.0 series) should use the stable branch
+    # to preload. For all other builds we use the default branch defined in _initialization.sh
+    if [[ ${AIRFLOW_VERSION} == 'v1-10-test' ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-test"
+    elif [[ ${AIRFLOW_VERSION} =~ v?1.* ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v1-10-stable"
+    elif [[ ${AIRFLOW_VERSION} == 'v2-0-test' ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v2-0-test"
+    elif [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then
+        AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="v2-0-stable"
+    else
         AIRFLOW_BRANCH_FOR_PYPI_PRELOADING=${DEFAULT_BRANCH}
     fi
 }
@@ -638,23 +657,18 @@ function build_images::prepare_prod_build() {
     if [[ -n "${INSTALL_AIRFLOW_REFERENCE=}" ]]; then
         # When --install-airflow-reference is used then the image is build from github tag
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
-            "--build-arg" "AIRFLOW_INSTALL_SOURCES=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
+            "--build-arg" "AIRFLOW_INSTALLATION_METHOD=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
         )
         export AIRFLOW_VERSION="${INSTALL_AIRFLOW_REFERENCE}"
         build_images::add_build_args_for_remote_install
     elif [[ -n "${INSTALL_AIRFLOW_VERSION=}" ]]; then
         # When --install-airflow-version is used then the image is build from PIP package
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
-            "--build-arg" "AIRFLOW_INSTALL_SOURCES=apache-airflow"
+            "--build-arg" "AIRFLOW_INSTALLATION_METHOD=apache-airflow"
             "--build-arg" "AIRFLOW_INSTALL_VERSION===${INSTALL_AIRFLOW_VERSION}"
             "--build-arg" "AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION}"
         )
         export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
-        if [[ ${AIRFLOW_VERSION} == "1.10.2" || ${AIRFLOW_VERSION} == "1.10.1" ]]; then
-            EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-                "--build-arg" "SLUGIFY_USES_TEXT_UNIDECODE=yes"
-            )
-        fi
         build_images::add_build_args_for_remote_install
     else
         # When no airflow version/reference is specified, production image is built from local sources
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 5f2ec6d..ff7abe9 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -148,11 +148,25 @@ function initialization::initialize_base_variables() {
     export INSTALLED_EXTRAS="async,amazon,celery,kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,google,azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
     readonly INSTALLED_EXTRAS
 
-    PIP_VERSION="20.2.4"
+    # Default version of PIP used (this has to be < 20.3 until https://github.com/apache/airflow/issues/12838 is solved)
+    PIP_VERSION=${PIP_VERSION:="20.2.4"}
     export PIP_VERSION
 
-    WHEEL_VERSION="0.35.1"
+    # We also pin the version of wheel to get consistent builds
+    WHEEL_VERSION=${WHEEL_VERSION:="0.36.1"}
     export WHEEL_VERSION
+
+    # By default, sources are installed from the local checkout when using breeze/ci
+    AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM:="."}
+    export AIRFLOW_SOURCES_FROM
+
+    # They are copied to /opt/airflow by default (breeze and ci)
+    AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO:="/opt/airflow"}
+    export AIRFLOW_SOURCES_TO
+
+    # And installed from there (breeze and ci)
+    AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION:="."}
+    export AIRFLOW_INSTALL_VERSION
 }
 
 # Determine current branch
@@ -461,6 +475,11 @@ function initialization::initialize_test_variables() {
     export TEST_TYPE=${TEST_TYPE:=""}
 }
 
+function initialization::initialize_package_variables() {
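+    # Default format of prepared packages - "wheel" is assumed here; other
+    # formats (e.g. source distributions) may also be supported by the scripts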
+    export PACKAGE_FORMAT=${PACKAGE_FORMAT:="wheel"}
+}
+
+
 function initialization::initialize_build_image_variables() {
     REMOTE_IMAGE_CONTAINER_ID_FILE="${AIRFLOW_SOURCES}/manifests/remote-airflow-manifest-image"
     LOCAL_IMAGE_BUILD_CACHE_HASH_FILE="${AIRFLOW_SOURCES}/manifests/local-build-cache-hash"
@@ -504,6 +523,7 @@ function initialization::initialize_common_environment() {
     initialization::initialize_git_variables
     initialization::initialize_github_variables
     initialization::initialize_test_variables
+    initialization::initialize_package_variables
     initialization::initialize_build_image_variables
 }
 
@@ -535,7 +555,6 @@ DockerHub variables:
 Mount variables:
 
     MOUNT_LOCAL_SOURCES: ${MOUNT_LOCAL_SOURCES}
-    MOUNT_FILES: ${MOUNT_FILES}
 
 Force variables:
 
@@ -597,15 +616,13 @@ Detected CI build environment:
     CI_BUILD_ID=${CI_BUILD_ID}
     CI_JOB_ID=${CI_JOB_ID}
     CI_EVENT_TYPE=${CI_EVENT_TYPE}
-    CI_SOURCE_REPO=${CI_SOURCE_REPO}
-    CI_SOURCE_BRANCH=${CI_SOURCE_BRANCH}
 
 Initialization variables:
 
     INIT_SCRIPT_FILE: ${INIT_SCRIPT_FILE=}
     LOAD_DEFAULT_CONNECTIONS: ${LOAD_DEFAULT_CONNECTIONS}
     LOAD_EXAMPLES: ${LOAD_EXAMPLES}
-    INSTALL_WHEELS: ${INSTALL_WHEELS=}
+    INSTALL_PACKAGES_FROM_DIST: ${INSTALL_PACKAGES_FROM_DIST=}
     DISABLE_RBAC: ${DISABLE_RBAC}
 
 Test variables:
@@ -629,30 +646,6 @@ function initialization::get_environment_for_builds_on_ci() {
         export CI_JOB_ID="${GITHUB_JOB}"
         export CI_EVENT_TYPE="${GITHUB_EVENT_NAME}"
         export CI_REF="${GITHUB_REF:=}"
-        if [[ ${CI_EVENT_TYPE:=} == "pull_request" ]]; then
-            # default name of the source repo (assuming it's forked without rename)
-            export SOURCE_AIRFLOW_REPO=${SOURCE_AIRFLOW_REPO:="airflow"}
-            # For Pull Requests it's ambiguous to find the PR and we need to
-            # assume that name of repo is airflow but it could be overridden in case it's not
-            export CI_SOURCE_REPO="${GITHUB_ACTOR}/${SOURCE_AIRFLOW_REPO}"
-            export CI_SOURCE_BRANCH="${GITHUB_HEAD_REF}"
-            BRANCH_EXISTS=$(git ls-remote --heads \
-                "https://github.com/${CI_SOURCE_REPO}.git" "${CI_SOURCE_BRANCH}" || true)
-            if [[ -z ${BRANCH_EXISTS=} ]]; then
-                verbosity::print_info
-                verbosity::print_info "https://github.com/${CI_SOURCE_REPO}.git Branch ${CI_SOURCE_BRANCH} does not exist"
-                verbosity::print_info
-                verbosity::print_info
-                verbosity::print_info "Fallback to https://github.com/${CI_TARGET_REPO}.git Branch ${CI_TARGET_BRANCH}"
-                verbosity::print_info
-                # Fallback to the target repository if the repo does not exist
-                export CI_SOURCE_REPO="${CI_TARGET_REPO}"
-                export CI_SOURCE_BRANCH="${CI_TARGET_BRANCH}"
-            fi
-        else
-            export CI_SOURCE_REPO="${CI_TARGET_REPO}"
-            export CI_SOURCE_BRANCH="${CI_TARGET_BRANCH}"
-        fi
     else
         # CI PR settings
         export CI_TARGET_REPO="${CI_TARGET_REPO="apache/airflow"}"
@@ -661,9 +654,6 @@ function initialization::get_environment_for_builds_on_ci() {
         export CI_JOB_ID="${CI_JOB_ID="0"}"
         export CI_EVENT_TYPE="${CI_EVENT_TYPE="pull_request"}"
         export CI_REF="${CI_REF="refs/head/master"}"
-
-        export CI_SOURCE_REPO="${CI_SOURCE_REPO="apache/airflow"}"
-        export CI_SOURCE_BRANCH="${DEFAULT_BRANCH="master"}"
     fi
 
     if [[ ${VERBOSE} == "true" && ${PRINT_INFO_FROM_SCRIPTS} == "true" ]]; then
@@ -726,8 +716,8 @@ function initialization::make_constants_read_only() {
     readonly IMAGE_TAG
 
     readonly AIRFLOW_PRE_CACHED_PIP_PACKAGES
-    readonly INSTALL_AIRFLOW_VIA_PIP
-    readonly AIRFLOW_LOCAL_PIP_WHEELS
+    readonly INSTALL_FROM_PYPI
+    readonly INSTALL_FROM_DOCKER_CONTEXT_FILES
     readonly AIRFLOW_CONSTRAINTS_REFERENCE
     readonly AIRFLOW_CONSTRAINTS_LOCATION
 
diff --git a/scripts/ci/libraries/_parameters.sh b/scripts/ci/libraries/_parameters.sh
index 566585e..7f15990 100644
--- a/scripts/ci/libraries/_parameters.sh
+++ b/scripts/ci/libraries/_parameters.sh
@@ -26,7 +26,6 @@ function parameters::save_to_file() {
     # shellcheck disable=SC2005
     echo "$(eval echo "\$$1")" >"${BUILD_CACHE_DIR}/.$1"
 }
-
 # check if parameter set for the variable is allowed (should be on the _breeze_allowed list)
 # parameters:
 # $1 - name of the variable
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index a5a827a..19cacfe 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -27,11 +27,10 @@
 function add_trap() {
     trap="${1}"
     shift
-    for signal in "${@}"
-    do
+    for signal in "${@}"; do
         # adding trap to exiting trap
         local handlers
-        handlers="$( trap -p "${signal}" | cut -f2 -d \' )"
+        handlers="$(trap -p "${signal}" | cut -f2 -d \')"
         # shellcheck disable=SC2064
         trap "${trap};${handlers}" "${signal}"
     done
@@ -52,7 +51,16 @@ function assert_in_container() {
 }
 
 function in_container_script_start() {
-    OUT_FILE_PRINTED_ON_ERROR=$(mktemp)
+    OUTPUT_PRINTED_ONLY_ON_ERROR=$(mktemp)
+    export OUTPUT_PRINTED_ONLY_ON_ERROR
+    readonly OUTPUT_PRINTED_ONLY_ON_ERROR
+
+    if [[ ${VERBOSE=} == "true" ]]; then
+        echo
+        echo "Output is redirected to ${OUTPUT_PRINTED_ONLY_ON_ERROR} and will be printed on error only"
+        echo
+    fi
+
     if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
         set -x
     fi
@@ -62,14 +70,14 @@ function in_container_script_end() {
     #shellcheck disable=2181
     EXIT_CODE=$?
     if [[ ${EXIT_CODE} != 0 ]]; then
-        if [[ "${PRINT_INFO_FROM_SCRIPTS=="true"}" == "true" ]] ;then
-            if [[ -f ${OUT_FILE_PRINTED_ON_ERROR} ]]; then
+        if [[ "${PRINT_INFO_FROM_SCRIPTS="true"}" == "true" ]]; then
+            if [[ -f "${OUTPUT_PRINTED_ONLY_ON_ERROR}" ]]; then
                 echo "###########################################################################################"
                 echo
                 echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container (See above for error message). Below is the output of the last action! ${COLOR_RESET}"
                 echo
                 echo "${COLOR_BLUE}***  BEGINNING OF THE LAST COMMAND OUTPUT *** ${COLOR_RESET}"
-                cat "${OUT_FILE_PRINTED_ON_ERROR}"
+                cat "${OUTPUT_PRINTED_ONLY_ON_ERROR}"
                 echo "${COLOR_BLUE}***  END OF THE LAST COMMAND OUTPUT ***  ${COLOR_RESET}"
                 echo
                 echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container. The actual error might be above the output!  ${COLOR_RESET}"
@@ -137,9 +145,9 @@ function in_container_fix_ownership() {
         if [[ ${VERBOSE} == "true" ]]; then
             echo "Fixing ownership of mounted files"
         fi
-        sudo find "${DIRECTORIES_TO_FIX[@]}" -print0 -user root 2>/dev/null \
-            | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference ||
-                true >/dev/null 2>&1
+        sudo find "${DIRECTORIES_TO_FIX[@]}" -print0 -user root 2>/dev/null |
+            sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference ||
+            true >/dev/null 2>&1
         if [[ ${VERBOSE} == "true" ]]; then
             echo "Fixed ownership of mounted files"
         fi
@@ -157,7 +165,7 @@ function in_container_clear_tmp() {
 }
 
 function in_container_go_to_airflow_sources() {
-    pushd "${AIRFLOW_SOURCES}"  &>/dev/null || exit 1
+    pushd "${AIRFLOW_SOURCES}" &>/dev/null || exit 1
 }
 
 function in_container_basic_sanity_check() {
@@ -176,7 +184,7 @@ function start_output_heartbeat() {
     echo "Starting output heartbeat"
     echo
 
-    bash 2> /dev/null <<EOF &
+    bash 2>/dev/null <<EOF &
 while true; do
   echo "\$(date): ${MESSAGE} "
   sleep ${INTERVAL}
@@ -187,41 +195,7 @@ EOF
 
 function stop_output_heartbeat() {
     kill "${HEARTBEAT_PID}" || true
-    wait "${HEARTBEAT_PID}" || true 2> /dev/null
-}
-
-function setup_kerberos() {
-    FQDN=$(hostname)
-    ADMIN="admin"
-    PASS="airflow"
-    KRB5_KTNAME=/etc/airflow.keytab
-
-    sudo cp "${AIRFLOW_SOURCES}/scripts/in_container/krb5/krb5.conf" /etc/krb5.conf
-
-    echo -e "${PASS}\n${PASS}" | \
-        sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "addprinc -randkey airflow/${FQDN}" 2>&1 \
-          | sudo tee "${AIRFLOW_HOME}/logs/kadmin_1.log" >/dev/null
-    RES_1=$?
-
-    sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "ktadd -k ${KRB5_KTNAME} airflow" 2>&1 \
-          | sudo tee "${AIRFLOW_HOME}/logs/kadmin_2.log" >/dev/null
-    RES_2=$?
-
-    sudo kadmin -p "${ADMIN}/admin" -w "${PASS}" -q "ktadd -k ${KRB5_KTNAME} airflow/${FQDN}" 2>&1 \
-          | sudo tee "${AIRFLOW_HOME}/logs/kadmin_3.log" >/dev/null
-    RES_3=$?
-
-    if [[ ${RES_1} != 0 || ${RES_2} != 0 || ${RES_3} != 0 ]]; then
-        echo
-        echo "Error when setting up Kerberos: ${RES_1} ${RES_2} ${RES_3}}!"
-        echo
-        exit 1
-    else
-        echo
-        echo "Kerberos enabled and working."
-        echo
-        sudo chmod 0644 "${KRB5_KTNAME}"
-    fi
+    wait "${HEARTBEAT_PID}" || true 2>/dev/null
 }
 
 function dump_airflow_logs() {
@@ -237,17 +211,217 @@ function dump_airflow_logs() {
     echo "###########################################################################################"
 }
 
-function install_released_airflow_version() {
+function install_airflow_from_wheel() {
+    local extras
+    extras="${1}"
+    local airflow_package
+    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache_airflow-*.whl')
+    if [[ -z "${airflow_package}" ]]; then
+        >&2 echo
+        >&2 echo "ERROR! Could not find airflow wheel package to install in dist"
+        >&2 echo
+        exit 4
+    fi
+    echo
+    echo "Found package: ${airflow_package}. Installing."
+    echo
+    pip install "${airflow_package}${extras}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function install_remaining_dependencies() {
+    pip install apache-beam[gcp] >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function uninstall_airflow() {
+    echo
+    echo "Uninstalling airflow"
+    echo
     pip uninstall -y apache-airflow || true
+    echo
+    echo "Remove all AIRFLOW_HOME remnants"
+    echo
     find /root/airflow/ -type f -print0 | xargs -0 rm -f --
-    if [[ ${1} == "1.10.2" || ${1} == "1.10.1" ]]; then
-        export SLUGIFY_USES_TEXT_UNIDECODE=yes
+}
+
+function uninstall_providers() {
+    echo
+    echo "Uninstalling all provider packages"
+    echo
+    local provider_packages_to_uninstall
+    provider_packages_to_uninstall=$(pip freeze | grep apache-airflow-providers || true)
+    if [[ -n ${provider_packages_to_uninstall} ]]; then
+        echo "${provider_packages_to_uninstall}" | xargs pip uninstall -y || true 2>/dev/null
     fi
+}
+
+function uninstall_airflow_and_providers() {
+    uninstall_providers
+    uninstall_airflow
+}
+
+function install_all_airflow_dependencies() {
+    echo
+    echo "Installing dependencies from 'all' extras"
+    echo
+    pip install ".[all]" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function install_released_airflow_version() {
+    local version="${1}"
+    local extras="${2}"
+    echo
+    echo "Installing released ${version} version of airflow with extras ${extras}"
+    echo
+
     rm -rf "${AIRFLOW_SOURCES}"/*.egg-info
-    INSTALLS=("apache-airflow==${1}" "werkzeug<1.0.0")
-    pip install --upgrade "${INSTALLS[@]}"
+    pip install --upgrade "apache-airflow${extras}==${version}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+function install_all_provider_packages_from_wheels() {
+    echo
+    echo "Installing all provider packages from wheels"
+    echo
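+    # The glob matches wheels such as (hypothetical name)
+    # apache_airflow_backport_providers_google-2020.11.23-py3-none-any.whl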
+    pip install /dist/apache_airflow*providers_*.whl >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
 }
 
+function install_all_provider_packages_from_tar_gz_files() {
+    echo
+    echo "Installing all provider packages from .tar.gz"
+    echo
+    pip install /dist/apache-airflow-*providers-*.tar.gz >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
+
+function verify_suffix_versions_for_package_preparation() {
+    TARGET_VERSION_SUFFIX=""
+    FILE_VERSION_SUFFIX=""
+
+    VERSION_SUFFIX_FOR_PYPI=${VERSION_SUFFIX_FOR_PYPI:=""}
+    readonly VERSION_SUFFIX_FOR_PYPI
+
+    VERSION_SUFFIX_FOR_SVN=${VERSION_SUFFIX_FOR_SVN:=""}
+
+    if [[ ${VERSION_SUFFIX_FOR_PYPI} != "" ]]; then
+        echo
+        echo "Version suffix for PyPI = ${VERSION_SUFFIX_FOR_PYPI}"
+        echo
+    fi
+    if [[ ${VERSION_SUFFIX_FOR_SVN} != "" ]]; then
+        echo
+        echo "Version suffix for SVN  = ${VERSION_SUFFIX_FOR_SVN}"
+        echo
+    fi
+
+    if [[ ${VERSION_SUFFIX_FOR_SVN} =~ ^rc ]]; then
+        echo """
+${COLOR_YELLOW_WARNING} The version suffix for SVN is used only for file names.
+         The version inside the packages has no version suffix.
+         This way we can just rename files when they graduate to final release.
+${COLOR_RESET}
+"""
+        echo
+        echo "This suffix is added '${VERSION_SUFFIX_FOR_SVN}' "
+        echo
+        FILE_VERSION_SUFFIX=${VERSION_SUFFIX_FOR_SVN}
+        VERSION_SUFFIX_FOR_SVN=""
+    fi
+    readonly FILE_VERSION_SUFFIX
+    readonly VERSION_SUFFIX_FOR_SVN
+
+    export FILE_VERSION_SUFFIX
+    export VERSION_SUFFIX_FOR_SVN
+    export VERSION_SUFFIX_FOR_PYPI
+
+    if [[ ${VERSION_SUFFIX_FOR_PYPI} != '' && ${VERSION_SUFFIX_FOR_SVN} != '' ]]; then
+        if [[ ${VERSION_SUFFIX_FOR_PYPI} != "${VERSION_SUFFIX_FOR_SVN}" ]]; then
+            echo
+            echo "${COLOR_RED_ERROR} If you specify both PyPI and SVN version suffixes they must match  ${COLOR_RESET}"
+            echo
+            echo "However they are different: PyPI:'${VERSION_SUFFIX_FOR_PYPI}' vs. SVN:'${VERSION_SUFFIX_FOR_SVN}'"
+            echo
+            exit 1
+        else
+            if [[ ${VERSION_SUFFIX_FOR_PYPI} =~ ^rc ]]; then
+                echo
+                echo "${COLOR_RED_ERROR} If you prepare an RC candidate, you need to specify only PyPI suffix  ${COLOR_RESET}"
+                echo
+                echo "However you specified both: PyPI'${VERSION_SUFFIX_FOR_PYPI}' and SVN '${VERSION_SUFFIX_FOR_SVN}'"
+                echo
+                exit 2
+            fi
+            # Just use one of them - they are both the same:
+            TARGET_VERSION_SUFFIX=${VERSION_SUFFIX_FOR_PYPI}
+        fi
+    else
+        if [[ ${VERSION_SUFFIX_FOR_PYPI} == '' && ${VERSION_SUFFIX_FOR_SVN} == '' ]]; then
+            # Preparing "official version"
+            TARGET_VERSION_SUFFIX=""
+        else
+
+            if [[ ${VERSION_SUFFIX_FOR_PYPI} == '' ]]; then
+                echo
+                echo "${COLOR_RED_ERROR} You should never specify version for PyPI only.  ${COLOR_RESET}"
+                echo
+                echo "You specified PyPI suffix: '${VERSION_SUFFIX_FOR_PYPI}'"
+                echo
+                exit 3
+            fi
+            TARGET_VERSION_SUFFIX=${VERSION_SUFFIX_FOR_PYPI}${VERSION_SUFFIX_FOR_SVN}
+            if [[ ! ${TARGET_VERSION_SUFFIX} =~ rc.* ]]; then
+                echo
+                echo "${COLOR_RED_ERROR} If you prepare an alpha/beta release, you need to specify both PyPI/SVN suffixes and they have to match.  ${COLOR_RESET}"
+                echo
+                echo "And they have to match. You specified only one suffix:  ${TARGET_VERSION_SUFFIX}."
+                echo
+                exit 4
+            fi
+        fi
+    fi
+    readonly TARGET_VERSION_SUFFIX
+    export TARGET_VERSION_SUFFIX
+}
+
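+# A sketch of possible outcomes of the suffix verification (hypothetical values):
+#   VERSION_SUFFIX_FOR_PYPI="rc1", VERSION_SUFFIX_FOR_SVN=""    -> TARGET_VERSION_SUFFIX="rc1"
+#   both suffixes empty                                         -> official release, empty suffix
+#   VERSION_SUFFIX_FOR_PYPI="rc1", VERSION_SUFFIX_FOR_SVN="rc1" -> error, RC needs the PyPI suffix only
+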
+function filename_to_python_module() {
+    # Turn the file name into a python package name
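+    # e.g. "./airflow/providers/google/__init__.py" becomes "airflow.providers.google"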
+    file="$1"
+    no_leading_dotslash="${file#./}"
+    no_py="${no_leading_dotslash/.py/}"
+    no_init="${no_py/\/__init__/}"
+    echo "${no_init//\//.}"
+}
+
+function import_all_provider_classes() {
+    echo
+    echo "Importing all provider classes"
+    echo
+
+    # We have to move to a directory where "airflow" is
+    unset PYTHONPATH
+    # We need to make sure we are not in the airflow checkout, otherwise it will automatically be added to the
+    # import path
+    cd /
+
+    declare -a IMPORT_CLASS_PARAMETERS
+
+    PROVIDER_PATHS=$(
+        python3 <<EOF 2>/dev/null
+import airflow.providers;
+path=airflow.providers.__path__
+for p in path._path:
+    print(p)
+EOF
+    )
+    export PROVIDER_PATHS
+
+    echo "Searching for providers packages in:"
+    echo "${PROVIDER_PATHS}"
+
+    while read -r provider_path; do
+        IMPORT_CLASS_PARAMETERS+=("--path" "${provider_path}")
+    done < <(echo "${PROVIDER_PATHS}")
+
+    python3 /opt/airflow/dev/import_all_classes.py "${IMPORT_CLASS_PARAMETERS[@]}"
+}
 
 function in_container_set_colors() {
     COLOR_BLUE=$'\e[34m'


[airflow] 31/44: Reset PIP version after eager upgrade (#13251)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit b22cd926c52597242d60c0d26facae2d4e32194e
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Tue Dec 22 14:23:54 2020 +0100

    Reset PIP version after eager upgrade (#13251)
    
    PIP upgrades itself after eager update, and since we (for now)
    stick with the 20.2.4 version we want to reset PIP to that
    version after eager upgrade.
    
    (cherry picked from commit c44092f8df9aa8555ef594d9fae4f28011a3a5a6)
---
 Dockerfile.ci | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/Dockerfile.ci b/Dockerfile.ci
index 67e5bb1..9a39aa1 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -320,8 +320,10 @@ ENV UPGRADE_TO_LATEST_CONSTRAINTS=${UPGRADE_TO_LATEST_CONSTRAINTS}
 RUN if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
         if [[ "${UPGRADE_TO_LATEST_CONSTRAINTS}" != "false" ]]; then \
             pip install -e ".[${AIRFLOW_EXTRAS}]" --upgrade --upgrade-strategy eager; \
+            pip install --upgrade "pip==${PIP_VERSION}"; \
         else \
             pip install -e ".[${AIRFLOW_EXTRAS}]" --upgrade --upgrade-strategy only-if-needed; \
+            pip install --upgrade "pip==${PIP_VERSION}"; \
         fi; \
     fi
 


[airflow] 04/44: Enable PIP check for both CI and PROD image (#12664)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 52eb912c30cb043c6632b35c92cdcf82ddb02e01
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Fri Nov 27 21:33:50 2020 +0100

    Enable PIP check for both CI and PROD image (#12664)
    
    This PR enables PIP check after constraints have been updated
    to be stable and 'pip check' compliant in #12636
    
    (cherry picked from commit fa8af2d16551e287673d94a40cfb41e49d685412)
---
 scripts/ci/images/ci_wait_for_ci_image.sh   | 17 +++++++++++++++++
 scripts/ci/images/ci_wait_for_prod_image.sh | 18 ++++++++++++++++++
 2 files changed, 35 insertions(+)

diff --git a/scripts/ci/images/ci_wait_for_ci_image.sh b/scripts/ci/images/ci_wait_for_ci_image.sh
index 0c3ea08..4b52b9a 100755
--- a/scripts/ci/images/ci_wait_for_ci_image.sh
+++ b/scripts/ci/images/ci_wait_for_ci_image.sh
@@ -18,6 +18,17 @@
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+function verify_ci_image_dependencies {
+    echo
+    echo "Checking if Airflow dependencies are non-conflicting in CI image."
+    echo
+
+    push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" \
+        "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
+    docker run --rm --entrypoint /bin/bash "${AIRFLOW_CI_IMAGE}" -c 'pip check'
+}
+
 push_pull_remove_images::check_if_github_registry_wait_for_image_enabled
 
 push_pull_remove_images::check_if_jq_installed
@@ -32,3 +43,9 @@ echo
 
 push_pull_remove_images::wait_for_github_registry_image \
     "${AIRFLOW_CI_IMAGE_NAME}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
+echo
+echo "Verifying the ${AIRFLOW_CI_IMAGE_NAME} image after pulling it"
+echo
+
+verify_ci_image_dependencies
diff --git a/scripts/ci/images/ci_wait_for_prod_image.sh b/scripts/ci/images/ci_wait_for_prod_image.sh
index 1c7cef5..e53aec1 100755
--- a/scripts/ci/images/ci_wait_for_prod_image.sh
+++ b/scripts/ci/images/ci_wait_for_prod_image.sh
@@ -18,6 +18,18 @@
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
+function verify_prod_image_dependencies {
+    echo
+    echo "Checking if Airflow dependencies are non-conflicting in PROD image."
+    echo
+
+    push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" \
+        "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
+    # TODO: remove the | true after we fixed pip check for prod image
+    docker run --rm --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" -c 'pip check' || true
+}
+
 push_pull_remove_images::check_if_github_registry_wait_for_image_enabled
 
 push_pull_remove_images::check_if_jq_installed
@@ -32,3 +44,9 @@ echo
 
 push_pull_remove_images::wait_for_github_registry_image \
     "${AIRFLOW_PROD_IMAGE_NAME}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
+
+echo
+echo "Verifying the ${AIRFLOW_PROD_IMAGE_NAME} image after pulling it"
+echo
+
+verify_prod_image_dependencies


[airflow] 26/44: Add identity pre-commit hook (#13089)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 97c2d63b1b09a94a672f5f3cdba326c42b4780ef
Author: Ruben Laguna <ru...@gmail.com>
AuthorDate: Wed Dec 16 09:03:48 2020 +0100

    Add identity pre-commit hook (#13089)
    
    (cherry picked from commit 99c2e03da9b552b74d78b1b10ce9b8f85c686678)
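    
    For reference, `identity` is one of pre-commit's built-in `meta` hooks:
    it simply prints the file names passed to the hook, which helps when
    debugging which files a check receives. A usage sketch:
    
    ```
    pre-commit run identity --all-files
    ```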
---
 .pre-commit-config.yaml |  1 +
 BREEZE.rst              | 10 +++++++++-
 STATIC_CODE_CHECKS.rst  |  2 ++
 breeze-complete         |  1 +
 4 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4c6b733..2e9edda 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -148,6 +148,7 @@ repos:
           - "2"
   - repo: meta
     hooks:
+      - id: identity
       - id: check-hooks-apply
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v3.3.0
diff --git a/BREEZE.rst b/BREEZE.rst
index 03290c3..43018a0 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -751,6 +751,14 @@ The above will run mypy check for all files.
       </a>
     </div>
 
+If you ever need to get a list of the files that will be checked (for troubleshooting when playing with
+the ``--from-ref`` and ``--to-ref`` options):
+
+.. code-block:: bash
+
+     breeze static-check identity --verbose # currently staged files
+     breeze static-check identity --verbose -- --from-ref $(git merge-base master HEAD) --to-ref HEAD #  branch updates
+
 Building the Documentation
 --------------------------
 
@@ -1981,7 +1989,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
                  check-apache-license check-builtin-literals check-executables-have-shebangs
                  check-hooks-apply check-integrations check-merge-conflict check-xml debug-statements
                  detect-private-key doctoc dont-use-safe-filter end-of-file-fixer fix-encoding-pragma
-                 flake8 forbid-tabs helm-lint incorrect-use-of-LoggingMixin insert-license
+                 flake8 forbid-tabs helm-lint identity incorrect-use-of-LoggingMixin insert-license
                  language-matters lint-dockerfile lint-openapi markdownlint mermaid mixed-line-ending
                  mypy mypy-helm no-relative-imports pre-commit-descriptions pydevd python2-compile
                  python2-fastcheck python-no-log-warn rst-backticks setup-order setup-installation
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 3fb49c3..7f02299 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -96,6 +96,8 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``helm-lint``                         Verifies if helm lint passes for the chart
 ----------------------------------- ---------------------------------------------------------------- ------------
+``identity``                          Prints inputs to the static check hooks for troubleshooting
+----------------------------------- ---------------------------------------------------------------- ------------
 ``incorrect-use-of-LoggingMixin``     Checks if LoggingMixin is properly imported.
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``insert-license``                    Adds licenses for most file types.
diff --git a/breeze-complete b/breeze-complete
index cdf8fe9..7e1ccc6 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -85,6 +85,7 @@ fix-encoding-pragma
 flake8
 forbid-tabs
 helm-lint
+identity
 incorrect-use-of-LoggingMixin
 insert-license
 language-matters


[airflow] 29/44: fixup! Production images on CI are now built from packages (#12685)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c58a9d39bf0305bab4cd86aa2e1bbf747bd8038b
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 16 17:04:21 2020 +0100

    fixup! Production images on CI are now built from packages (#12685)
---
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index 43defdf..1bcc0e6 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -23,9 +23,6 @@ export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
 export DOCKER_CACHE="local"
 export VERBOSE="true"
 
-export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
-readonly INSTALLED_EXTRAS
-
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 


[airflow] 42/44: Also add codecov action to apache airflow repo (#13328)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 1626de8c475be169f1a84104b16bbd8895e99e59
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Dec 27 17:42:45 2020 +0100

    Also add codecov action to apache airflow repo (#13328)
    
    Follow up after #13327
    
    (cherry picked from commit 98896e4e327f256fd04087a49a13e16a246022c9)
---
 .github/workflows/ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1b337a3..7902f6c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -600,7 +600,7 @@ jobs:
       - name: "Removes unnecessary artifacts"
         run: ls ./coverage-files | grep -v coverage | xargs rm -rf
       - name: "Upload all coverage reports to codecov"
-        uses: codecov/codecov-action@v1
+        uses: apache/airflow-codecov-action@1fc7722ded4708880a5aea49f2bfafb9336f0c8d  # v1.1.1
         with:
           directory: "./coverage-files"
 


[airflow] 13/44: Apply labels to Docker images in a single instruction (#12931)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0934ff05500fa1da0915f2bbc5ca2d0ac66228cc
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Wed Dec 9 05:19:38 2020 +0000

    Apply labels to Docker images in a single instruction (#12931)
    
    * Apply labels to Docker images in a single instruction
    
    While looking at the build logs for something else I noticed this
    oddity at the end of the CI logs:
    
    ```
    Tue, 08 Dec 2020 21:20:19 GMT Step 125/135 : LABEL org.apache.airflow.distro="debian"
    ...
    Tue, 08 Dec 2020 21:21:14 GMT Step 133/135 : LABEL org.apache.airflow.commitSha=${COMMIT_SHA}
    Tue, 08 Dec 2020 21:21:14 GMT  ---> Running in 1241a5f6cdb7
    Tue, 08 Dec 2020 21:21:21 GMT Removing intermediate container 1241a5f6cdb7
    ```
    
    Applying all the labels took 1m2s! Hopefully applying these in a single
    layer/command should speed things up.
    
    A less extreme example still took 43s
    
    ```
    Tue, 08 Dec 2020 20:44:40 GMT Step 125/135 : LABEL org.apache.airflow.distro="debian"
    ...
    Tue, 08 Dec 2020 20:45:18 GMT Step 133/135 : LABEL org.apache.airflow.commitSha=${COMMIT_SHA}
    Tue, 08 Dec 2020 20:45:18 GMT  ---> Running in dc601207dbcb
    Tue, 08 Dec 2020 20:45:23 GMT Removing intermediate container dc601207dbcb
    Tue, 08 Dec 2020 20:45:23 GMT  ---> 5aae5dd0f702
    ```
    
    * Update Dockerfile
    
    (cherry picked from commit 63ea88d1b1bdad5ce24f498fdf3600217069b4ad)
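    
    One way to sanity-check the consolidated labels after a build is to
    read them back from the image metadata (the image tag is illustrative):
    
    ```
    docker inspect --format '{{ json .Config.Labels }}' apache/airflow:master-python3.6
    ```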
---
 Dockerfile    | 47 ++++++++++++++++++++++++-----------------------
 Dockerfile.ci | 18 +++++++++---------
 2 files changed, 33 insertions(+), 32 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 35f50b3..eaac1e1 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -260,19 +260,20 @@ RUN AIRFLOW_SITE_PACKAGE="/root/.local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/s
 RUN find /root/.local -executable -print0 | xargs --null chmod g+x && \
     find /root/.local -print0 | xargs --null chmod g+rw
 
-LABEL org.apache.airflow.distro="debian"
-LABEL org.apache.airflow.distro.version="buster"
-LABEL org.apache.airflow.module="airflow"
-LABEL org.apache.airflow.component="airflow"
-LABEL org.apache.airflow.image="airflow-build-image"
 
 ARG BUILD_ID
 ENV BUILD_ID=${BUILD_ID}
 ARG COMMIT_SHA
 ENV COMMIT_SHA=${COMMIT_SHA}
 
-LABEL org.apache.airflow.buildImage.buildId=${BUILD_ID}
-LABEL org.apache.airflow.buildImage.commitSha=${COMMIT_SHA}
+
+LABEL org.apache.airflow.distro="debian" \
+  org.apache.airflow.distro.version="buster" \
+  org.apache.airflow.module="airflow" \
+  org.apache.airflow.component="airflow" \
+  org.apache.airflow.image="airflow-build-image" \
+  org.apache.airflow.buildImage.buildId=${BUILD_ID} \
+  org.apache.airflow.buildImage.commitSha=${COMMIT_SHA}
 
 ##############################################################################################
 # This is the actual Airflow image - much smaller than the build one. We copy
@@ -284,13 +285,13 @@ SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"]
 ARG AIRFLOW_UID
 ARG AIRFLOW_GID
 
-LABEL org.apache.airflow.distro="debian"
-LABEL org.apache.airflow.distro.version="buster"
-LABEL org.apache.airflow.module="airflow"
-LABEL org.apache.airflow.component="airflow"
-LABEL org.apache.airflow.image="airflow"
-LABEL org.apache.airflow.uid="${AIRFLOW_UID}"
-LABEL org.apache.airflow.gid="${AIRFLOW_GID}"
+LABEL org.apache.airflow.distro="debian" \
+  org.apache.airflow.distro.version="buster" \
+  org.apache.airflow.module="airflow" \
+  org.apache.airflow.component="airflow" \
+  org.apache.airflow.image="airflow" \
+  org.apache.airflow.uid="${AIRFLOW_UID}" \
+  org.apache.airflow.gid="${AIRFLOW_GID}"
 
 ARG PYTHON_BASE_IMAGE
 ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE}
@@ -425,15 +426,15 @@ ENV BUILD_ID=${BUILD_ID}
 ARG COMMIT_SHA
 ENV COMMIT_SHA=${COMMIT_SHA}
 
-LABEL org.apache.airflow.distro="debian"
-LABEL org.apache.airflow.distro.version="buster"
-LABEL org.apache.airflow.module="airflow"
-LABEL org.apache.airflow.component="airflow"
-LABEL org.apache.airflow.image="airflow"
-LABEL org.apache.airflow.uid="${AIRFLOW_UID}"
-LABEL org.apache.airflow.gid="${AIRFLOW_GID}"
-LABEL org.apache.airflow.mainImage.buildId=${BUILD_ID}
-LABEL org.apache.airflow.mainImage.commitSha=${COMMIT_SHA}
+LABEL org.apache.airflow.distro="debian" \
+  org.apache.airflow.distro.version="buster" \
+  org.apache.airflow.module="airflow" \
+  org.apache.airflow.component="airflow" \
+  org.apache.airflow.image="airflow" \
+  org.apache.airflow.uid="${AIRFLOW_UID}" \
+  org.apache.airflow.gid="${AIRFLOW_GID}" \
+  org.apache.airflow.mainImage.buildId=${BUILD_ID} \
+  org.apache.airflow.mainImage.commitSha=${COMMIT_SHA}
 
 ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"]
 CMD ["--help"]
diff --git a/Dockerfile.ci b/Dockerfile.ci
index c71fae6..bd00238 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -387,15 +387,15 @@ ENV BUILD_ID=${BUILD_ID}
 ARG COMMIT_SHA
 ENV COMMIT_SHA=${COMMIT_SHA}
 
-LABEL org.apache.airflow.distro="debian"
-LABEL org.apache.airflow.distro.version="buster"
-LABEL org.apache.airflow.module="airflow"
-LABEL org.apache.airflow.component="airflow"
-LABEL org.apache.airflow.image="airflow-ci"
-LABEL org.apache.airflow.uid="0"
-LABEL org.apache.airflow.gid="0"
-LABEL org.apache.airflow.buildId=${BUILD_ID}
-LABEL org.apache.airflow.commitSha=${COMMIT_SHA}
+LABEL org.apache.airflow.distro="debian" \
+  org.apache.airflow.distro.version="buster" \
+  org.apache.airflow.module="airflow" \
+  org.apache.airflow.component="airflow" \
+  org.apache.airflow.image="airflow-ci" \
+  org.apache.airflow.uid="0" \
+  org.apache.airflow.gid="0" \
+  org.apache.airflow.buildId=${BUILD_ID} \
+  org.apache.airflow.commitSha=${COMMIT_SHA}
 
 EXPOSE 8080
 


[airflow] 41/44: Switch to Apache-owned GitHub actions (#13327)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit d774ff890077d26ee8d683b5f567634233452ca1
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Dec 27 17:13:50 2020 +0100

    Switch to Apache-owned GitHub actions (#13327)
    
    There was a change in ASF policy: only "Made by GitHub"
    actions and actions residing in Apache-owned repositories
    are allowed to be used by ASF projects. This was in
    response to a security incident.
    
    More details:
    
    Policy:
    
    * https://infra.apache.org/github-actions-secrets.html
    
    Discussion builds@apache.org:
    
    * https://lists.apache.org/thread.html/r435c45dfc28ec74e28314aa9db8a216a2b45ff7f27b15932035d3f65%40%3Cbuilds.apache.org%3E
    
    Discussion users@infra.apache.org:
    
    * https://lists.apache.org/thread.html/r900f8f9a874006ed8121bdc901a0d1acccbb340882c1f94dad61a5e9%40%3Cusers.infra.apache.org%3E
    
    (cherry picked from commit c6d66cd15fe85d3bca357c9e60ac8434b843e5d6)
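    
    Note that the workflows pin each action to a full commit SHA rather
    than a movable tag. A hedged sketch of how such a SHA can be resolved
    and verified against a release tag (repository and tag are illustrative):
    
    ```
    git ls-remote https://github.com/apache/airflow-checks-action refs/tags/v1.1.0
    ```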
---
 .github/workflows/build-images-workflow-run.yml    | 22 +++++++++++-----------
 .github/workflows/ci.yml                           |  4 ++--
 .../workflows/label_when_reviewed_workflow_run.yml | 14 +++++++-------
 3 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index ee9c48c..778ff54 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -57,13 +57,13 @@ jobs:
       buildImages: ${{ steps.build-images.outputs.buildImages }}
     steps:
       - name: "Get information about the original trigger of the run"
-        uses: potiuk/get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
+        uses: apache/airflow-get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
         id: source-run-info
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           sourceRunId: ${{ github.event.workflow_run.id }}
       - name: "Cancel duplicated 'CI Build' runs"
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           cancelMode: allDuplicates
@@ -80,7 +80,7 @@ jobs:
         # trick ¯\_(ツ)_/¯. We name the build-info job appropriately
         # and then we try to find and cancel all the jobs with the same Event + Repo + Branch as the
         # current Event/Repo/Branch combination.
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         with:
           cancelMode: namedJobs
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -96,7 +96,7 @@ jobs:
         # We also produce list of canceled "CI Build' runs as output, so that we
         # can cancel all the matching "Build Images" workflow runs in the two following steps.
         # Yeah. Adding to the complexity ¯\_(ツ)_/¯.
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         id: cancel-failed
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -129,14 +129,14 @@ jobs:
         # it to cancel any jobs that have matching names containing Source Run Id:
         # followed by one of the run ids. Yes I know it's super complex ¯\_(ツ)_/¯.
         if: env.BUILD_IMAGES == 'true' && steps.cancel-failed.outputs.cancelledRuns != '[]'
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         with:
           cancelMode: namedJobs
           token: ${{ secrets.GITHUB_TOKEN }}
           notifyPRCancel: true
           jobNameRegexps: ${{ steps.extract-cancelled-failed-runs.outputs.matching-regexp }}
       - name: "Cancel duplicated 'CodeQL' runs"
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         id: cancel
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -156,7 +156,7 @@ jobs:
         # trick ¯\_(ツ)_/¯. We name the build-info job appropriately and then we try to match
         # all the jobs with the same Event + Repo + Branch match and cancel all the duplicates for those
         # This might cancel own run, so this is the last step in the job
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         with:
           cancelMode: allDuplicatedNamedJobs
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -302,7 +302,7 @@ jobs:
               echo "::set-output name=proceed::false"
           fi
       - name: Initiate Github Checks for Building image
-        uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         id: build-image-check
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -356,7 +356,7 @@ jobs:
         run: ./scripts/ci/images/ci_push_production_images.sh
         if: matrix.image-type == 'PROD' && steps.defaults.outputs.proceed == 'true'
       - name: Update Github Checks for Building image with status
-        uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         if: always() && steps.defaults.outputs.proceed == 'true'
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -378,7 +378,7 @@ jobs:
     needs: [build-images]
     steps:
       - name: "Canceling the 'CI Build' source workflow in case of failure!"
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           cancelMode: self
@@ -393,7 +393,7 @@ jobs:
     needs: [build-images]
     steps:
       - name: "Canceling the 'CI Build' source workflow in case of failure!"
-        uses: potiuk/cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           cancelMode: self
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a449d2c..1b337a3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -99,7 +99,7 @@ jobs:
       pullRequestLabels: ${{ steps.source-run-info.outputs.pullRequestLabels }}
     steps:
       - name: "Get information about the PR"
-        uses: potiuk/get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
+        uses: apache/airflow-get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
         id: source-run-info
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -882,7 +882,7 @@ jobs:
       - name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}"
         run: ./scripts/ci/constraints/ci_commit_constraints.sh
       - name: "Push changes"
-        uses: ad-m/github-push-action@40bf560936a8022e68a3c00e7d2abefaf01305a6  # v0.6.0
+        uses: apache/airflow-github-push-action@40bf560936a8022e68a3c00e7d2abefaf01305a6  # v0.6.0
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           branch: ${{ steps.constraints-branch.outputs.branch }}
diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml
index e47d774..d66472b 100644
--- a/.github/workflows/label_when_reviewed_workflow_run.yml
+++ b/.github/workflows/label_when_reviewed_workflow_run.yml
@@ -30,13 +30,13 @@ jobs:
       labelSet: ${{ steps.label-when-reviewed.outputs.labelSet }}
     steps:
       - name: "Get information about the original trigger of the run"
-        uses: potiuk/get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
+        uses: apache/airflow-get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
         id: source-run-info
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           sourceRunId: ${{ github.event.workflow_run.id }}
       - name: Initiate Selective Build check
-        uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         id: selective-build-check
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -86,7 +86,7 @@ jobs:
             ./scripts/ci/selective_ci_checks.sh
           fi
       - name: "Label when approved by committers for PRs that require full tests"
-        uses: TobKed/label-when-approved-action@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
+        uses: apache/airflow-label-when-approved@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
         id: label-full-test-prs-when-approved-by-commiters
         if: >
           steps.selective-checks.outputs.run-tests == 'true' &&
@@ -102,7 +102,7 @@ jobs:
             If they don't merge it quickly - please rebase it to the latest master at your convenience,
             or amend the last commit of the PR, and push it with --force-with-lease.
       - name: "Initiate GitHub Check forcing rerun of SH ${{ github.event.pull_request.head.sha }}"
-        uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         id: full-test-check
         if: steps.label-full-test-prs-when-approved-by-commiters.outputs.labelSet == 'true'
         with:
@@ -117,7 +117,7 @@ jobs:
             [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})
             "}
       - name: "Label when approved by committers for PRs that do not require full tests"
-        uses: TobKed/label-when-approved-action@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
+        uses: apache/airflow-label-when-approved@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
         id: label-simple-test-prs-when-approved-by-commiters
         if: >
           steps.selective-checks.outputs.run-tests == 'true' &&
@@ -134,7 +134,7 @@ jobs:
             'full tests needed'. Then you should rebase to the latest master or amend the last commit
             of the PR, and push it with --force-with-lease.
       - name: "Label when approved by committers for PRs that do not require tests at all"
-        uses: TobKed/label-when-approved-action@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
+        uses: apache/airflow-label-when-approved@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
         id: label-no-test-prs-when-approved-by-commiters
         if: steps.selective-checks.outputs.run-tests != 'true'
         with:
@@ -148,7 +148,7 @@ jobs:
             needed and add the 'full tests needed' label. Then you should rebase it to the latest master
             or amend the last commit of the PR, and push it with --force-with-lease.
       - name: Update Selective Build check
-        uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         if: always()
         with:
           token: ${{ secrets.GITHUB_TOKEN }}


[airflow] 05/44: Improve wording of selective checks comments (#12701)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 50d6c9a8268e7d234a5e30258c0c530b514b22ef
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Nov 29 18:27:09 2020 +0100

    Improve wording of selective checks comments (#12701)
    
    (cherry picked from commit 7e426d3f4772232a8a707951b52ccef4b4caffa1)
---
 .../workflows/label_when_reviewed_workflow_run.yml | 24 ++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml
index 6ea15b0..e47d774 100644
--- a/.github/workflows/label_when_reviewed_workflow_run.yml
+++ b/.github/workflows/label_when_reviewed_workflow_run.yml
@@ -97,21 +97,23 @@ jobs:
           require_committers_approval: 'true'
           pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
           comment: >
-            The PR needs to run all tests because it modifies core of Airflow! Please rebase it to latest
-            master or ask committer to re-run it!
+            The PR most likely needs to run the full matrix of tests because it modifies parts of the core
+            of Airflow. However, committers might decide to merge it quickly and take the risk.
+            If they don't merge it quickly - please rebase it to the latest master at your convenience,
+            or amend the last commit of the PR, and push it with --force-with-lease.
       - name: "Initiate GitHub Check forcing rerun of SH ${{ github.event.pull_request.head.sha }}"
         uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         id: full-test-check
         if: steps.label-full-test-prs-when-approved-by-commiters.outputs.labelSet == 'true'
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
-          name: "Please rebase or re-run to run full tests"
+          name: "Please rebase or amend, and force push the PR to run full tests"
           status: "in_progress"
           sha: ${{ steps.source-run-info.outputs.sourceHeadSha }}
           details_url: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
           output: >
             {"summary":
-            "The PR needs to run all tests! This was determined via
+            "The PR likely needs to run all tests! This was determined via selective check in
             [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})
             "}
       - name: "Label when approved by committers for PRs that do not require full tests"
@@ -126,9 +128,11 @@ jobs:
           require_committers_approval: 'true'
           pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
           comment: >
-            The PR should be OK to be merged with just subset of tests as it does not modify Core of
-            Airflow. The committers might merge it or can add a label 'full tests needed' and re-run it
-            to run all tests if they see it is needed!
+            The PR is likely OK to be merged with just a subset of tests for the default Python and Database
+            versions without running the full matrix of tests, because it does not modify the core of
+            Airflow. If the committers decide that the full tests matrix is needed, they will add the label
+            'full tests needed'. Then you should rebase to the latest master or amend the last commit
+            of the PR, and push it with --force-with-lease.
       - name: "Label when approved by committers for PRs that do not require tests at all"
         uses: TobKed/label-when-approved-action@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
         id: label-no-test-prs-when-approved-by-commiters
@@ -138,7 +142,11 @@ jobs:
           label: 'okay to merge'
           pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
           require_committers_approval: 'true'
-          comment: "The PR is ready to be merged. No tests are needed!"
+          comment: >
+            The PR is likely ready to be merged. No tests are needed, as neither important environment files
+            nor python files were modified by it. However, committers might decide that the full test matrix is
+            needed and add the 'full tests needed' label. Then you should rebase it to the latest master
+            or amend the last commit of the PR, and push it with --force-with-lease.
       - name: Update Selective Build check
         uses: LouisBrunner/checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
         if: always()


[airflow] 43/44: Disable persisting credentials in Github Action's checkout (#13389)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 503c4d3e9a22bd85efc8fad9abbd33bb4a8e6db7
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Wed Dec 30 14:25:29 2020 +0100

    Disable persisting credentials in Github Action's checkout (#13389)
    
    This PR disables persisting credentials in Github Actions checkout.
    
    This is a result of discussion in builds@apache.org
    https://lists.apache.org/thread.html/r435c45dfc28ec74e28314aa9db8a216a2b45ff7f27b15932035d3f65%40%3Cbuilds.apache.org%3E
    
    It turns out that, contrary to the documentation, actions (specifically
    the checkout action) can use GITHUB_TOKEN without it being specified as
    an input in the yaml file, and the GitHub checkout action
    leaves the repository with credentials stored locally that
    enable any step in the same job to push to the GitHub
    repository. This was initially thought to be forbidden (the
    documentation clearly says that the action must have the
    GITHUB_TOKEN passed to it in the .yaml workflow in order to
    use it), but apparently it behaves differently.
    
    This leaves open an attack vector where, for example,
    any PIP package installed in the following steps could push
    arbitrary changes to the Apache Airflow GitHub repository.
    
    Security incidents have been reported to both GitHub and the
    Apache Security team; in the meantime we add configuration
    to remove the credentials after the checkout step.
    
    https://docs.github.com/en/free-pro-team@latest/actions/reference/authentication-in-a-workflow#using-the-github_token-in-a-workflow
    
    > Using the GITHUB_TOKEN in a workflow
    
    > To use the GITHUB_TOKEN secret, you *must* reference it in your workflow
      file. Using a token might include passing the token as an input to an
      action that requires it, or making authenticated GitHub API calls.
    
    (cherry picked from commit d079b913d283378dca37dc9ea25b04186d3e326c)
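    
    A hedged illustration of the problem: the checkout action stores the
    token in the repository's local git config (under an http extraheader
    key), so any later step in the same job can read it. With
    persist-credentials: false the entry is never written:
    
    ```
    # Shows the persisted auth header, if any (key name as used by actions/checkout)
    git config --local --get http.https://github.com/.extraheader \
        || echo "no persisted credentials"
    ```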
---
 .github/workflows/build-images-workflow-run.yml    |  7 ++++
 .github/workflows/ci.yml                           | 41 +++++++++++++++++++++-
 .github/workflows/codeql-analysis.yml              |  2 ++
 .../workflows/label_when_reviewed_workflow_run.yml |  5 +++
 .github/workflows/scheduled_quarantined.yml        |  2 ++
 5 files changed, 56 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index 778ff54..bed5dfc 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -207,6 +207,8 @@ jobs:
         if: needs.cancel-workflow-runs.outputs.sourceEvent  == 'pull_request'
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: >
           Event: ${{ needs.cancel-workflow-runs.outputs.sourceEvent }}
           Repo: ${{ needs.cancel-workflow-runs.outputs.sourceHeadRepo }}
@@ -224,10 +226,13 @@ jobs:
         with:
           ref: ${{ needs.cancel-workflow-runs.outputs.targetCommitSha }}
           fetch-depth: 2
+          persist-credentials: false
         if: needs.cancel-workflow-runs.outputs.sourceEvent  == 'pull_request'
       # checkout the master version again, to use the right script in master workflow
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: Selective checks
         id: selective-checks
         env:
@@ -279,6 +284,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           ref: ${{ needs.cancel-workflow-runs.outputs.targetCommitSha }}
+          persist-credentials: false
       - name: "Retrieve DEFAULTS from the _initialization.sh"
         # We cannot "source" the script here because that would be a security problem (we cannot run
         # any code that comes from the sources coming from the PR. Therefore we extract the
@@ -323,6 +329,7 @@ jobs:
         with:
           path: "main-airflow"
           ref: "${{ needs.cancel-workflow-runs.outputs.targetBranch }}"
+          persist-credentials: false
         if: steps.defaults.outputs.proceed == 'true'
       - name: "Setup python"
         uses: actions/setup-python@v2
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7902f6c..728dfb0 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -105,6 +105,8 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: >
           Event: ${{ github.event_name }}
           Repo: ${{ steps.source-run-info.outputs.sourceHeadRepo }}
@@ -126,6 +128,7 @@ jobs:
         with:
           ref: ${{ github.sha }}
           fetch-depth: 2
+          persist-credentials: false
         if: github.event_name  == 'pull_request'
       - name: Selective checks
         id: selective-checks
@@ -155,6 +158,8 @@ jobs:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         if: needs.build-info.outputs.waitForImage == 'true'
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -192,6 +197,8 @@ jobs:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         if: needs.build-info.outputs.waitForImage == 'true'
+        with:
+          persist-credentials: false
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
         if: needs.build-info.outputs.waitForImage == 'true'
@@ -207,13 +214,15 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [build-info, ci-images]
     env:
-      SKIP: "pylint,identity"
+      SKIP: "identity"
       MOUNT_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'false'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -250,6 +259,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -267,6 +278,7 @@ jobs:
         with:
           ref: ${{ github.sha }}
           fetch-depth: 2
+          persist-credentials: false
       - name: "Static checks: basic checks only"
         run: |
           ./scripts/ci/static_checks/run_basic_static_checks.sh "${{ github.sha }}"
@@ -282,6 +294,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Build docs"
@@ -311,6 +325,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -367,6 +383,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -422,6 +440,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -474,6 +494,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -531,6 +553,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -618,6 +642,8 @@ jobs:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         if: needs.build-info.outputs.waitForImage == 'true'
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -649,6 +675,8 @@ jobs:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         if: needs.build-info.outputs.waitForImage == 'true'
+        with:
+          persist-credentials: false
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
         if: needs.build-info.outputs.waitForImage == 'true'
@@ -683,6 +711,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -762,6 +792,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -800,6 +832,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -830,6 +864,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -867,6 +903,8 @@ jobs:
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: "Set constraints branch name"
         id: constraints-branch
         run: ./scripts/ci/constraints/ci_branch_constraints.sh
@@ -875,6 +913,7 @@ jobs:
         with:
           path: "repo"
           ref: ${{ steps.constraints-branch.outputs.branch }}
+          persist-credentials: false
       - name: "Get all artifacts (constraints)"
         uses: actions/download-artifact@v2
         with:
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 9fa7b94..8bdd809 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -36,6 +36,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           fetch-depth: 2
+          persist-credentials: false
       - name: Selective checks
         id: selective-checks
         env:
@@ -68,6 +69,7 @@ jobs:
           # We must fetch at least the immediate parents so that if this is
           # a pull request then we can checkout the head.
           fetch-depth: 2
+          persist-credentials: false
         if: |
           matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' ||
           matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true'
diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml
index d66472b..4400bae 100644
--- a/.github/workflows/label_when_reviewed_workflow_run.yml
+++ b/.github/workflows/label_when_reviewed_workflow_run.yml
@@ -51,6 +51,8 @@ jobs:
             "}
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: >
           Event: ${{ steps.source-run-info.outputs.sourceEvent }}
           Repo: ${{ steps.source-run-info.outputs.sourceHeadRepo }}
@@ -68,9 +70,12 @@ jobs:
         with:
           ref: ${{ steps.source-run-info.outputs.targetCommitSha }}
           fetch-depth: 2
+          persist-credentials: false
       # checkout the master version again, to use the right script in master workflow
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - name: Selective checks
         id: selective-checks
         env:
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index 9877e4c..3071693 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -79,6 +79,8 @@ jobs:
       needs.trigger-tests.outputs.run-tests == 'true' || github.event_name != 'pull_request'
     steps:
       - uses: actions/checkout@v2
+        with:
+          persist-credentials: false
       - uses: actions/setup-python@v2
         with:
           python-version: '3.7'


[airflow] 12/44: Builds prod images on DockerHub from packages (#12908)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 6e3c049337275c097b1221c4e61d5ddac7832798
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Tue Dec 8 12:45:03 2020 +0100

    Builds prod images on DockerHub from packages (#12908)
    
    This build combines building both the CI and PROD images in one
    script execution on DockerHub per Python version.
    
    First the CI image is built; then that image is used
    to build all the packages from sources, and those
    packages are used to build the PROD image.
    
    The resulting image is a PROD image built from packages generated from the latest sources.
    
    Closes: #12261
    (cherry picked from commit f9e9ad2b096ff9d8ee78224333f799ca3968b6bd)
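    
    Condensed, the two-phase flow wired up here looks as follows (function
    names are from the repo's CI bash libraries; the sequencing is a
    sketch, not the verbatim script):
    
    ```
    # assumes scripts/ci/libraries/_script_init.sh has been sourced
    
    # Phase 1: build and push the CI image
    build_images::prepare_ci_build
    build_images::rebuild_ci_image_if_needed
    push_pull_remove_images::push_ci_images
    
    # Phase 2: build packages with the CI image, then build the PROD image from them
    build_images::prepare_prod_build
    build_images::build_prod_images_from_packages
    push_pull_remove_images::push_prod_images
    ```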
---
 scripts/ci/images/ci_build_dockerhub.sh          | 64 ++++++++++++++----------
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh | 16 +++---
 scripts/ci/libraries/_build_images.sh            | 33 ++++++++++++
 scripts/ci/libraries/_initialization.sh          |  9 ++++
 4 files changed, 90 insertions(+), 32 deletions(-)

diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh
index f176448..a0ad0e6 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/images/ci_build_dockerhub.sh
@@ -16,11 +16,14 @@
 # specific language governing permissions and limitations
 # under the License.
 
+# shellcheck disable=SC2030,SC2031
+
 # This is hook build used by DockerHub. We are also using it
 # on CI to potentially rebuild (and refresh layers that
 # are not cached) Docker images that are used to run CI jobs
 export FORCE_ANSWER_TO_QUESTIONS="yes"
 export VERBOSE_COMMANDS="true"
+export VERBOSE="true"
 
 : "${DOCKER_REPO:?"ERROR: Please specify DOCKER_REPO variable following the pattern HOST/DOCKERHUB_USER/DOCKERHUB_REPO"}"
 
@@ -35,48 +38,57 @@ echo "DOCKERHUB_USER=${DOCKERHUB_USER}"
 echo "DOCKERHUB_REPO=${DOCKERHUB_REPO}"
 echo
 
-: "${DOCKER_TAG:?"ERROR: Please specify DOCKER_TAG variable following the pattern BRANCH-pythonX.Y[-ci]"}"
+: "${DOCKER_TAG:?"ERROR: Please specify DOCKER_TAG variable following the pattern BRANCH-pythonX.Y"}"
 
 echo "DOCKER_TAG=${DOCKER_TAG}"
 
-[[ ${DOCKER_TAG:=} =~ ${DEFAULT_BRANCH}-python([0-9.]*)(.*) ]] && export PYTHON_MAJOR_MINOR_VERSION=${BASH_REMATCH[1]}
+[[ ${DOCKER_TAG:=} =~ ${DEFAULT_BRANCH}-python([0-9.]*) ]] && export PYTHON_MAJOR_MINOR_VERSION=${BASH_REMATCH[1]}
 
-: "${PYTHON_MAJOR_MINOR_VERSION:?"The tag '${DOCKER_TAG}' should follow the pattern .*-pythonX.Y[-ci]"}"
+: "${PYTHON_MAJOR_MINOR_VERSION:?"The tag '${DOCKER_TAG}' should follow the pattern .*-pythonX.Y"}"
 
 echo "Detected PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION}"
 echo
 
-FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
-export FORCE_AIRFLOW_PROD_BASE_TAG
-
-readonly FORCE_AIRFLOW_PROD_BASE_TAG
-
-if [[ "${FORCE_AIRFLOW_PROD_BASE_TAG}" =~ [0-9].* ]]; then
-    # Disable cache if we are building a tagged version
-    export DOCKER_CACHE="disabled"
-fi
-
-# shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+(
+    export INSTALL_FROM_PYPI="true"
+    export INSTALL_FROM_DOCKER_CONTEXT_FILES="false"
+    export INSTALL_PROVIDERS_FROM_SOURCES="true"
+    export AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
+    export DOCKER_CACHE="pulled"
+    # shellcheck source=scripts/ci/libraries/_script_init.sh
+    . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
-if [[ ${DOCKER_TAG} == *python*-ci ]]; then
     echo
-    echo "Building CI image"
+    echo "Building and pushing CI image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
     echo
     rm -rf "${BUILD_CACHE_DIR}"
     build_images::prepare_ci_build
     build_images::rebuild_ci_image_if_needed
-    push_pull_remove_images::push_ci_images
-elif [[ ${DOCKER_TAG} == *python* ]]; then
+    if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
+        # Do not push if we are building a tagged version
+        push_pull_remove_images::push_ci_images
+    fi
+)
+
+(
+    export INSTALL_FROM_PYPI="false"
+    export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+    export INSTALL_PROVIDERS_FROM_SOURCES="false"
+    export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
+    export DOCKER_CACHE="pulled"
+
+    if [[ "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
+        # Disable cache and set name of the tag as image name if we are building a tagged version
+        export DOCKER_CACHE="disabled"
+        export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
+    fi
+    # shellcheck source=scripts/ci/libraries/_script_init.sh
+    . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
     echo
-    echo "Building prod image"
+    echo "Building and pushing PROD image for ${PYTHON_MAJOR_MINOR_VERSION} in a sub-process"
     echo
     rm -rf "${BUILD_CACHE_DIR}"
     build_images::prepare_prod_build
-    build_images::build_prod_images
+    build_images::build_prod_images_from_packages
     push_pull_remove_images::push_prod_images
-else
-    echo
-    echo "Skipping the build in Dockerhub. The tag is not good: ${DOCKER_TAG}"
-    echo
-fi
+)
diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index d38182b..700487c 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -15,6 +15,15 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
+export INSTALL_FROM_PYPI="false"
+export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
+export INSTALL_PROVIDERS_FROM_SOURCES="false"
+export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
+export DOCKER_CACHE="local"
+export VERBOSE="true"
+
+
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
 
@@ -23,9 +32,6 @@
 function build_prod_images_on_ci() {
     build_images::prepare_prod_build
 
-    rm -rf "${BUILD_CACHE_DIR}"
-    mkdir -pv "${BUILD_CACHE_DIR}"
-
     if [[ ${USE_GITHUB_REGISTRY} == "true" && ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then
 
         # Tries to wait for the image indefinitely
@@ -34,10 +40,8 @@ function build_prod_images_on_ci() {
         build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}" \
             ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_PROD_IMAGE}"
 
-        build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}" \
-            ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_PROD_BUILD_IMAGE}"
     else
-        build_images::build_prod_images
+        build_images::build_prod_images_from_packages
     fi
 
 
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index d054f15..8f48c16 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -856,6 +856,39 @@ function build_images::determine_docker_cache_strategy() {
 }
 
 
+function build_images::build_prod_images_from_packages() {
+    # Cleanup dist and docker-context-files folders
+    mkdir -pv "${AIRFLOW_SOURCES}/dist"
+    mkdir -pv "${AIRFLOW_SOURCES}/docker-context-files"
+    rm -f "${AIRFLOW_SOURCES}/dist/"*.{whl,tar.gz}
+    rm -f "${AIRFLOW_SOURCES}/docker-context-files/"*.{whl,tar.gz}
+
+    pip_download_command="pip download -d /dist '.[${INSTALLED_EXTRAS}]' --constraint 'https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt'"
+
+    # Download all dependencies needed
+    docker run --rm --entrypoint /bin/bash \
+        "${EXTRA_DOCKER_FLAGS[@]}" \
+        "${AIRFLOW_CI_IMAGE}" -c "${pip_download_command}"
+
+    # Remove all downloaded apache airflow packages
+    rm -f "${AIRFLOW_SOURCES}/dist/"apache_airflow*.whl
+    rm -f "${AIRFLOW_SOURCES}/dist/"apache-airflow*.tar.gz
+
+    # Move the remaining downloaded packages to the docker-context-files folder
+    mv -f "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/"
+
+    # Build apache airflow packages
+    build_airflow_packages::build_airflow_packages
+
+    # Remove generated tar.gz packages
+    rm -f "${AIRFLOW_SOURCES}/dist/"apache-airflow*.tar.gz
+
+    # move the packages to docker-context-files folder
+    mkdir -pv "${AIRFLOW_SOURCES}/docker-context-files"
+    mv "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/"
+    build_images::build_prod_images
+}
+
 # Useful information for people who stumble upon a pip check failure
 function build_images::inform_about_pip_check() {
         echo """
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 9c3ab67..5f2ec6d 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -144,6 +144,15 @@ function initialization::initialize_base_variables() {
     # If no Airflow Home defined - fallback to ${HOME}/airflow
     AIRFLOW_HOME_DIR=${AIRFLOW_HOME:=${HOME}/airflow}
     export AIRFLOW_HOME_DIR
+
+    export INSTALLED_EXTRAS="async,amazon,celery,kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,google,azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
+    readonly INSTALLED_EXTRAS
+
+    PIP_VERSION="20.2.4"
+    export PIP_VERSION
+
+    WHEEL_VERSION="0.35.1"
+    export WHEEL_VERSION
 }
 
 # Determine current branch
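
As a usage note on the `pip download` step added above: it resolves the full
dependency set for the given extras against the pinned constraints file, so
that the PROD image can later be built offline from docker-context-files. A
hedged standalone example (extras, branch and Python version illustrative):

```
pip download -d /dist 'apache-airflow[celery,postgres]' \
    --constraint 'https://raw.githubusercontent.com/apache/airflow/constraints-1-10/constraints-3.6.txt'
```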


[airflow] 09/44: Adds airflow as viable docker command in official image (#12878)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ece02997bae84abbe5772036055005cc37571728
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Mon Dec 7 15:17:49 2020 +0100

    Adds airflow as viable docker command in official image (#12878)
    
    The change is backwards-compatible. It still allows passing an airflow
    command without "airflow" as the first parameter, but you can now
    also pass "airflow" and the rest of the parameters will
    be treated as "airflow" command parameters.
    
    The documentation is updated to reflect the entrypoint behaviour,
    including the _CMD option for SQL connections.
    
    Part of #12762 and #12602
    
    Partially extracted from  #12766
    
    (cherry picked from commit 4d44faac77b639a19379da714bf532ceb9416a1b)
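    
    In other words, both invocations below end up running the same airflow
    subcommand (the image tag is illustrative):
    
    ```
    docker run -it apache/airflow:1.10.14 webserver
    docker run -it apache/airflow:1.10.14 airflow webserver
    ```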
---
 docs/production-deployment.rst               | 37 +++++++++++++++++++---------
 scripts/in_container/prod/entrypoint_prod.sh | 22 +++++++++++------
 2 files changed, 39 insertions(+), 20 deletions(-)

diff --git a/docs/production-deployment.rst b/docs/production-deployment.rst
index ac6c76d..7964b34 100644
--- a/docs/production-deployment.rst
+++ b/docs/production-deployment.rst
@@ -323,20 +323,20 @@ The PROD image entrypoint works as follows:
   This is in order to accommodate the
   `OpenShift Guidelines <https://docs.openshift.com/enterprise/3.0/creating_images/guidelines.html>`_
 
-* If ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is passed to the container and it is either mysql or postgres
-  SQL alchemy connection, then the connection is checked and the script waits until the database is reachable.
-
-* If no ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is set or if it is set to sqlite SQL alchemy connection
-  then db reset is executed.
-
-* If ``AIRFLOW__CELERY__BROKER_URL`` variable is passed and scheduler, worker of flower command is used then
-  the connection is checked and the script waits until the Celery broker database is reachable.
-
 * The ``AIRFLOW_HOME`` is set by default to ``/opt/airflow/`` - this means that DAGs
   are in default in the ``/opt/airflow/dags`` folder and logs are in the ``/opt/airflow/logs``
 
 * The working directory is ``/opt/airflow`` by default.
 
+* If the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is passed to the container and it is either a mysql or postgres
+  SQL alchemy connection, then the connection is checked and the script waits until the database is reachable.
+  If the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD`` variable is passed to the container, it is evaluated as a
+  command to execute and the result of this evaluation is used as ``AIRFLOW__CORE__SQL_ALCHEMY_CONN``. The
+  ``_CMD`` variable takes precedence over the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable.
+
+* If no ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is set, then a SQLite database is created in
+  ${AIRFLOW_HOME}/airflow.db and a db reset is executed.
+
 * If first argument equals to "bash" - you are dropped to a bash shell or you can executes bash command
   if you specify extra arguments. For example:
 
@@ -349,7 +349,6 @@ The PROD image entrypoint works as follows:
   drwxr-xr-x 2 airflow root 4096 Jun  5 18:12 dags
   drwxr-xr-x 2 airflow root 4096 Jun  5 18:12 logs
 
-
 * If first argument is equal to "python" - you are dropped in python shell or python commands are executed if
   you pass extra parameters. For example:
 
@@ -358,13 +357,27 @@ The PROD image entrypoint works as follows:
   > docker run -it apache/airflow:master-python3.6 python -c "print('test')"
   test
 
-* If there are any other arguments - they are passed to "airflow" command
+* If the first argument equals "airflow" - the rest of the arguments are treated as an airflow command
+  to execute. Example:
 
 .. code-block:: bash
 
-  > docker run -it apache/airflow:master-python3.6
+   docker run -it apache/airflow:master-python3.6 airflow webserver
+
+* If there are any other arguments - they are simply passed to the "airflow" command. For example:
+
+.. code-block:: bash
+
+  > docker run -it apache/airflow:master-python3.6 version
   2.0.0.dev0
 
+* If the ``AIRFLOW__CELERY__BROKER_URL`` variable is passed and an airflow command with the
+  scheduler, worker or flower command is used, then the script checks the broker connection
+  and waits until the Celery broker database is reachable.
+  If the ``AIRFLOW__CELERY__BROKER_URL_CMD`` variable is passed to the container, it is evaluated as a
+  command to execute and the result of this evaluation is used as ``AIRFLOW__CELERY__BROKER_URL``. The
+  ``_CMD`` variable takes precedence over the ``AIRFLOW__CELERY__BROKER_URL`` variable.
+
 Production image build arguments
 --------------------------------
 
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index 60103e7..0276e69 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -98,11 +98,11 @@ if ! whoami &> /dev/null; then
   export HOME="${AIRFLOW_USER_HOME_DIR}"
 fi
 
-
 # Warning: command environment variables (*_CMD) have priority over usual configuration variables
 # for configuration parameters that require sensitive information. This is the case for the SQL database
 # and the broker backend in this entrypoint script.
 
+
 if [[ -n "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD=}" ]]; then
     verify_db_connection "$(eval "$AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD")"
 else
@@ -111,6 +111,19 @@ else
     verify_db_connection "${AIRFLOW__CORE__SQL_ALCHEMY_CONN}"
 fi
 
+# The bash and python commands should still verify the basic connections, so they are run after the
+# DB check but before the broker check
+if [[ ${AIRFLOW_COMMAND} == "bash" ]]; then
+   shift
+   exec "/bin/bash" "${@}"
+elif [[ ${AIRFLOW_COMMAND} == "python" ]]; then
+   shift
+   exec "python" "${@}"
+elif [[ ${AIRFLOW_COMMAND} == "airflow" ]]; then
+   AIRFLOW_COMMAND="${2}"
+   shift
+fi
+
 # Note: the broker backend configuration concerns only a subset of Airflow components
 if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|worker|flower)$ ]]; then
     if [[ -n "${AIRFLOW__CELERY__BROKER_URL_CMD=}" ]]; then
@@ -123,13 +136,6 @@ if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|worker|flower)$ ]]; then
     fi
 fi
 
-if [[ ${AIRFLOW_COMMAND} == "bash" ]]; then
-   shift
-   exec "/bin/bash" "${@}"
-elif [[ ${AIRFLOW_COMMAND} == "python" ]]; then
-   shift
-   exec "python" "${@}"
-fi
 
 # Run the command
 exec airflow "${@}"
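
As a usage sketch of the _CMD precedence handled above (the connection string
and the secret-fetching command below are illustrative assumptions, not taken
from the commit):

  # plain variable - checked directly by the entrypoint
  docker run -e AIRFLOW__CORE__SQL_ALCHEMY_CONN="postgresql+psycopg2://user:pass@host/db" \
      apache/airflow:master-python3.6 webserver

  # _CMD variant - evaluated first and takes precedence over the plain variable
  docker run -e AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD="cat /run/secrets/sql_alchemy_conn" \
      apache/airflow:master-python3.6 webserver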


[airflow] 44/44: Run "third party" github actions from submodules instead (#13514)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit c8d788346183ec306182b9f512bc5e834dd2aca2
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Mon Jan 11 10:38:15 2021 +0000

    Run "third party" github actions from submodules instead (#13514)
    
    Rather than having to mirror all the repos, we can instead use
    git submodules to pull in the third-party actions we want to use. With
    recent(ish) changes to submodule review on GitHub we still get the
    same "review/audit" visibility for changes, but this way we neither
    have to "pollute" our repo with the actions code nor have to
    maintain a fork of each third-party action.
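
    A sketch of the resulting pattern (the URL and path below are taken
    from the .gitmodules entries added in this commit):

        # pin the third-party action as a submodule
        git submodule add https://github.com/potiuk/get-workflow-origin \
            .github/actions/get-workflow-origin

        # ...then reference it locally in a workflow step:
        #   uses: ./.github/actions/get-workflow-origin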
    
    (cherry picked from commit f115983550833b1516ce58e66b49fe4fb1a0eff7)
---
 .github/actions/cancel-workflow-runs               |  1 +
 .github/actions/checks-action                      |  1 +
 .github/actions/codecov-action                     |  1 +
 .github/actions/configure-aws-credentials          |  1 +
 .github/actions/get-workflow-origin                |  1 +
 .github/actions/github-push-action                 |  1 +
 .github/actions/label-when-approved-action         |  1 +
 .github/workflows/build-images-workflow-run.yml    | 57 ++++++++++++++--------
 .github/workflows/ci.yml                           | 22 ++++++---
 .../workflows/label_when_reviewed_workflow_run.yml | 24 ++++-----
 .gitmodules                                        | 21 ++++++++
 .pre-commit-config.yaml                            |  2 +-
 12 files changed, 94 insertions(+), 39 deletions(-)

diff --git a/.github/actions/cancel-workflow-runs b/.github/actions/cancel-workflow-runs
new file mode 160000
index 0000000..953e057
--- /dev/null
+++ b/.github/actions/cancel-workflow-runs
@@ -0,0 +1 @@
+Subproject commit 953e057dc81d3458935a18d1184c386b0f6b5738
diff --git a/.github/actions/checks-action b/.github/actions/checks-action
new file mode 160000
index 0000000..9f02872
--- /dev/null
+++ b/.github/actions/checks-action
@@ -0,0 +1 @@
+Subproject commit 9f02872da71b6f558c6a6f190f925dde5e4d8798
diff --git a/.github/actions/codecov-action b/.github/actions/codecov-action
new file mode 160000
index 0000000..1fc7722
--- /dev/null
+++ b/.github/actions/codecov-action
@@ -0,0 +1 @@
+Subproject commit 1fc7722ded4708880a5aea49f2bfafb9336f0c8d
diff --git a/.github/actions/configure-aws-credentials b/.github/actions/configure-aws-credentials
new file mode 160000
index 0000000..e97d7fb
--- /dev/null
+++ b/.github/actions/configure-aws-credentials
@@ -0,0 +1 @@
+Subproject commit e97d7fbc8e0e5af69631c13daa0f4b5a8d88165b
diff --git a/.github/actions/get-workflow-origin b/.github/actions/get-workflow-origin
new file mode 160000
index 0000000..588cc14
--- /dev/null
+++ b/.github/actions/get-workflow-origin
@@ -0,0 +1 @@
+Subproject commit 588cc14f9f1cdf1b8be3db816855e96422204fec
diff --git a/.github/actions/github-push-action b/.github/actions/github-push-action
new file mode 160000
index 0000000..40bf560
--- /dev/null
+++ b/.github/actions/github-push-action
@@ -0,0 +1 @@
+Subproject commit 40bf560936a8022e68a3c00e7d2abefaf01305a6
diff --git a/.github/actions/label-when-approved-action b/.github/actions/label-when-approved-action
new file mode 160000
index 0000000..4c5190f
--- /dev/null
+++ b/.github/actions/label-when-approved-action
@@ -0,0 +1 @@
+Subproject commit 4c5190fec5661e98d83f50bbd4ef9ebb48bd1194
diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index bed5dfc..5c85cb4 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -56,14 +56,19 @@ jobs:
       cacheDirective: ${{ steps.cache-directive.outputs.docker-cache }}
       buildImages: ${{ steps.build-images.outputs.buildImages }}
     steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          submodules: recursive
       - name: "Get information about the original trigger of the run"
-        uses: apache/airflow-get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
+        uses: ./.github/actions/get-workflow-origin
         id: source-run-info
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           sourceRunId: ${{ github.event.workflow_run.id }}
       - name: "Cancel duplicated 'CI Build' runs"
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           cancelMode: allDuplicates
@@ -80,7 +85,7 @@ jobs:
         # trick ¯\_(ツ)_/¯. We name the build-info job appropriately
         # and then we try to find and cancel all the jobs with the same Event + Repo + Branch as the
         # current Event/Repo/Branch combination.
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         with:
           cancelMode: namedJobs
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -96,7 +101,7 @@ jobs:
         # We also produce list of canceled "CI Build' runs as output, so that we
         # can cancel all the matching "Build Images" workflow runs in the two following steps.
         # Yeah. Adding to the complexity ¯\_(ツ)_/¯.
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         id: cancel-failed
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -129,14 +134,14 @@ jobs:
         # it to cancel any jobs that have matching names containing Source Run Id:
         # followed by one of the run ids. Yes I know it's super complex ¯\_(ツ)_/¯.
         if: env.BUILD_IMAGES == 'true' && steps.cancel-failed.outputs.cancelledRuns != '[]'
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         with:
           cancelMode: namedJobs
           token: ${{ secrets.GITHUB_TOKEN }}
           notifyPRCancel: true
           jobNameRegexps: ${{ steps.extract-cancelled-failed-runs.outputs.matching-regexp }}
       - name: "Cancel duplicated 'CodeQL' runs"
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         id: cancel
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -156,7 +161,7 @@ jobs:
         # trick ¯\_(ツ)_/¯. We name the build-info job appropriately and then we try to match
         # all the jobs with the same Event + Repo + Branch match and cancel all the duplicates for those
         # This might cancel own run, so this is the last step in the job
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         with:
           cancelMode: allDuplicatedNamedJobs
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -307,8 +312,19 @@ jobs:
           else
               echo "::set-output name=proceed::false"
           fi
+      - name: >
+          Checkout "${{ needs.cancel-workflow-runs.outputs.targetBranch }}" branch to 'main-airflow' folder
+          to use ci/scripts from there.
+        uses: actions/checkout@v2
+        with:
+          path: "main-airflow"
+          ref: "${{ needs.cancel-workflow-runs.outputs.targetBranch }}"
+          persist-credentials: false
+          submodules: recursive
+        if: steps.defaults.outputs.proceed == 'true'
       - name: Initiate Github Checks for Building image
-        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        # Use the submodule from main, not the PR branch
+        uses: ./main-airflow/.github/actions/checks-action
         id: build-image-check
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -322,15 +338,6 @@ jobs:
             [Image Build](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})
             for details" }
         if: steps.defaults.outputs.proceed == 'true'
-      - name: >
-          Checkout "${{ needs.cancel-workflow-runs.outputs.targetBranch }}" branch to 'main-airflow' folder
-          to use ci/scripts from there.
-        uses: actions/checkout@v2
-        with:
-          path: "main-airflow"
-          ref: "${{ needs.cancel-workflow-runs.outputs.targetBranch }}"
-          persist-credentials: false
-        if: steps.defaults.outputs.proceed == 'true'
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -363,7 +370,7 @@ jobs:
         run: ./scripts/ci/images/ci_push_production_images.sh
         if: matrix.image-type == 'PROD' && steps.defaults.outputs.proceed == 'true'
       - name: Update Github Checks for Building image with status
-        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: ./main-airflow/.github/actions/checks-action
         if: always() && steps.defaults.outputs.proceed == 'true'
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -384,8 +391,13 @@ jobs:
     if: cancelled()
     needs: [build-images]
     steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          submodules: recursive
       - name: "Canceling the 'CI Build' source workflow in case of failure!"
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           cancelMode: self
@@ -399,8 +411,13 @@ jobs:
     if: failure()
     needs: [build-images]
     steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          submodules: recursive
       - name: "Canceling the 'CI Build' source workflow in case of failure!"
-        uses: apache/airflow-cancel-workflow-runs@953e057dc81d3458935a18d1184c386b0f6b5738  # v4_7
+        uses: ./.github/actions/cancel-workflow-runs
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           cancelMode: self
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 728dfb0..e87564d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -98,15 +98,16 @@ jobs:
       pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
       pullRequestLabels: ${{ steps.source-run-info.outputs.pullRequestLabels }}
     steps:
-      - name: "Get information about the PR"
-        uses: apache/airflow-get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
-        id: source-run-info
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+          submodules: recursive
+      - name: "Get information about the PR"
+        uses: ./.github/actions/get-workflow-origin
+        id: source-run-info
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
       - name: >
           Event: ${{ github.event_name }}
           Repo: ${{ steps.source-run-info.outputs.sourceHeadRepo }}
@@ -296,6 +297,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+          submodules: recursive
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Build docs"
@@ -617,6 +619,11 @@ jobs:
       - tests-mysql
       - tests-quarantined
     steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          submodules: recursive
       - name: "Download all artifacts from the current build"
         uses: actions/download-artifact@v2
         with:
@@ -624,7 +631,7 @@ jobs:
       - name: "Removes unnecessary artifacts"
         run: ls ./coverage-files | grep -v coverage | xargs rm -rf
       - name: "Upload all coverage reports to codecov"
-        uses: apache/airflow-codecov-action@1fc7722ded4708880a5aea49f2bfafb9336f0c8d  # v1.1.1
+        uses: ./.github/actions/codecov-action
         with:
           directory: "./coverage-files"
 
@@ -905,6 +912,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+          submodules: recursive
       - name: "Set constraints branch name"
         id: constraints-branch
         run: ./scripts/ci/constraints/ci_branch_constraints.sh
@@ -921,7 +929,7 @@ jobs:
       - name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}"
         run: ./scripts/ci/constraints/ci_commit_constraints.sh
       - name: "Push changes"
-        uses: apache/airflow-github-push-action@40bf560936a8022e68a3c00e7d2abefaf01305a6  # v0.6.0
+        uses: ./.github/actions/github-push-action
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           branch: ${{ steps.constraints-branch.outputs.branch }}
diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml
index 4400bae..1ed50dd 100644
--- a/.github/workflows/label_when_reviewed_workflow_run.yml
+++ b/.github/workflows/label_when_reviewed_workflow_run.yml
@@ -29,14 +29,19 @@ jobs:
     outputs:
       labelSet: ${{ steps.label-when-reviewed.outputs.labelSet }}
     steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+          submodules: recursive
       - name: "Get information about the original trigger of the run"
-        uses: apache/airflow-get-workflow-origin@588cc14f9f1cdf1b8be3db816855e96422204fec  # v1_3
+        uses: ./.github/actions/get-workflow-origin
         id: source-run-info
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           sourceRunId: ${{ github.event.workflow_run.id }}
       - name: Initiate Selective Build check
-        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: ./.github/actions/checks-action
         id: selective-build-check
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
@@ -49,10 +54,6 @@ jobs:
             "Checking selective status of the build in
             [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})
             "}
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
       - name: >
           Event: ${{ steps.source-run-info.outputs.sourceEvent }}
           Repo: ${{ steps.source-run-info.outputs.sourceHeadRepo }}
@@ -76,6 +77,7 @@ jobs:
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+          submodules: recursive
       - name: Selective checks
         id: selective-checks
         env:
@@ -91,7 +93,7 @@ jobs:
             ./scripts/ci/selective_ci_checks.sh
           fi
       - name: "Label when approved by committers for PRs that require full tests"
-        uses: apache/airflow-label-when-approved@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
+        uses: ./.github/actions/label-when-approved-action
         id: label-full-test-prs-when-approved-by-commiters
         if: >
           steps.selective-checks.outputs.run-tests == 'true' &&
@@ -107,7 +109,7 @@ jobs:
             If they don't merge it quickly - please rebase it to the latest master at your convenience,
             or amend the last commit of the PR, and push it with --force-with-lease.
       - name: "Initiate GitHub Check forcing rerun of SH ${{ github.event.pull_request.head.sha }}"
-        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: ./.github/actions/checks-action
         id: full-test-check
         if: steps.label-full-test-prs-when-approved-by-commiters.outputs.labelSet == 'true'
         with:
@@ -122,7 +124,7 @@ jobs:
             [the run](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})
             "}
       - name: "Label when approved by committers for PRs that do not require full tests"
-        uses: apache/airflow-label-when-approved@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
+        uses: ./.github/actions/label-when-approved-action
         id: label-simple-test-prs-when-approved-by-commiters
         if: >
           steps.selective-checks.outputs.run-tests == 'true' &&
@@ -139,7 +141,7 @@ jobs:
             'full tests needed'. Then you should rebase to the latest master or amend the last commit
             of the PR, and push it with --force-with-lease.
       - name: "Label when approved by committers for PRs that do not require tests at all"
-        uses: apache/airflow-label-when-approved@4c5190fec5661e98d83f50bbd4ef9ebb48bd1194  # v1.3
+        uses: ./.github/actions/label-when-approved-action
         id: label-no-test-prs-when-approved-by-commiters
         if: steps.selective-checks.outputs.run-tests != 'true'
         with:
@@ -153,7 +155,7 @@ jobs:
             needed and add the 'full tests needed' label. Then you should rebase it to the latest master
             or amend the last commit of the PR, and push it with --force-with-lease.
       - name: Update Selective Build check
-        uses: apache/airflow-checks-action@9f02872da71b6f558c6a6f190f925dde5e4d8798  # v1.1.0
+        uses: ./.github/actions/checks-action
         if: always()
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..1779ddd
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,21 @@
+[submodule ".github/actions/get-workflow-origin"]
+	path = .github/actions/get-workflow-origin
+	url = https://github.com/potiuk/get-workflow-origin
+[submodule ".github/actions/cancel-workflow-runs"]
+	path = .github/actions/cancel-workflow-runs
+	url = https://github.com/potiuk/cancel-workflow-runs
+[submodule ".github/actions/checks-action"]
+	path = .github/actions/checks-action
+	url = https://github.com/LouisBrunner/checks-action
+[submodule ".github/actions/configure-aws-credentials"]
+	path = .github/actions/configure-aws-credentials
+	url = https://github.com/aws-actions/configure-aws-credentials
+[submodule ".github/actions/codecov-action"]
+	path = .github/actions/codecov-action
+	url = https://github.com/codecov/codecov-action
+[submodule ".github/actions/github-push-action"]
+	path = .github/actions/github-push-action
+	url = https://github.com/ad-m/github-push-action
+[submodule ".github/actions/label-when-approved-action"]
+	path = .github/actions/label-when-approved-action
+	url = https://github.com/TobKed/label-when-approved-action
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2e6207d..a98b0ea 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
     rev: v1.1.9
     hooks:
       - id: forbid-tabs
-        exclude: ^docs/Makefile$|^clients/gen/go.sh
+        exclude: ^docs/Makefile$|^clients/gen/go.sh|\.gitmodules$
       - id: insert-license
         name: Add license for all SQL files
         files: \.sql$
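
A practical note on the submodule approach: a fresh clone must initialize the
submodules before the local "uses: ./.github/actions/..." paths resolve, which
is what the "submodules: recursive" checkout option above ensures on CI. A
minimal sketch for a local checkout:

  git clone --recurse-submodules https://github.com/apache/airflow.git
  # or, in an existing clone:
  git submodule update --init --recursive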


[airflow] 14/44: Update CI to run tests against v2-0-test branch (#10891)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit ba4858f9559b0a94a27445242e32396e622fc7a4
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Wed Dec 9 19:35:04 2020 +0000

    Update CI to run tests against v2-0-test branch (#10891)
    
    (cherry picked from commit db166ba75c447a08b94e7be1ab09042fd6361581)
---
 .github/workflows/ci.yml                        |  18 ++-
 BREEZE.rst                                      |  89 +++++++-----
 CI.rst                                          |   8 +-
 breeze                                          | 176 ++++++++++++++++++------
 breeze-complete                                 |   1 +
 scripts/ci/constraints/ci_branch_constraints.sh |   2 +
 6 files changed, 209 insertions(+), 85 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 44ffc7b..ccfd50f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -21,9 +21,9 @@ on:  # yamllint disable-line rule:truthy
   schedule:
     - cron: '28 0 * * *'
   push:
-    branches: ['master', 'v1-10-test', 'v1-10-stable']
+    branches: ['master', 'v1-10-test', 'v1-10-stable', 'v2-0-test']
   pull_request:
-    branches: ['master', 'v1-10-test', 'v1-10-stable']
+    branches: ['master', 'v1-10-test', 'v1-10-stable', 'v2-0-test']
 
 env:
 
@@ -745,7 +745,8 @@ jobs:
       - verify-prod-images
       - docs
     if: >
-      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test') &&
+      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+      github.ref == 'refs/heads/v2-0-test') &&
       github.event_name != 'schedule'
     strategy:
       matrix:
@@ -782,7 +783,8 @@ jobs:
       - verify-ci-images
       - docs
     if: >
-      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ) &&
+      (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+      github.ref == 'refs/heads/v2-0-test') &&
       github.event_name != 'schedule'
     strategy:
       matrix:
@@ -817,7 +819,9 @@ jobs:
       - ci-images
     env:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-    if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test'
+    if: >
+      github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+      github.ref == 'refs/heads/v2-0-test'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -852,7 +856,9 @@ jobs:
       - tests-mysql
       - tests-postgres
       - tests-kubernetes
-    if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test'
+    if: >
+      github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
+      github.ref == 'refs/heads/v2-0-test'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
diff --git a/BREEZE.rst b/BREEZE.rst
index 095fe1b..43705f9 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1188,7 +1188,12 @@ This is the current syntax for  `./breeze <./breeze>`_:
           image building time in production image and at container entering time for CI image. One of:
 
                  1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 1.10.8 1.10.7 1.10.6 1.10.5 1.10.4 1.10.3
-                 1.10.2 wheel
+                 1.10.2 wheel none
+
+          When 'none' is used, you can install airflow from local packages. When building an image, the
+          airflow package should be added to 'docker-context-files' and the
+          --install-from-docker-context-files flag should be used. When running an image, the airflow
+          package should be added to the dist folder and the --install-packages-from-dist flag should be used.
 
   -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
           If specified, installs Airflow directly from reference in GitHub. This happens at
@@ -1198,10 +1203,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --no-rbac-ui
           Disables RBAC UI when Airflow 1.10.* is installed.
 
-  --install-wheels
-          If specified it will look for wheel packages placed in dist folder and it will install the
-          wheels from there after installing Airflow. This is useful for testing backport
-          packages as well as in the future for testing provider packages for 2.0.
+  --install-packages-from-dist
+          If specified, it will look for packages placed in the dist folder and will install those
+          packages after installing Airflow. This is useful for testing provider
+          packages.
 
   -I, --production-image
           Use production image for entering the environment and builds (not for tests).
@@ -1231,10 +1236,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --image-tag TAG
           Additional tag in the image.
 
-  --skip-installing-airflow-via-pip
-          Skips installing Airflow via PIP. If you use this flag and want to install
-          Airflow, you have to install Airflow from packages placed in
-          'docker-context-files' and use --add-local-pip-files flag.
+  --disable-pypi-when-building
+          Disable installing Airflow from PyPI when building. If you use this flag and want
+          to install Airflow, you have to install it from packages placed in
+          'docker-context-files' and use the --install-from-local-files-when-building flag.
 
   --additional-extras ADDITIONAL_EXTRAS
           Additional extras to pass to build images The default is no additional extras.
@@ -1287,13 +1292,13 @@ This is the current syntax for  `./breeze <./breeze>`_:
           in the form of '/docker-context-files/<NAME_OF_THE_FILE>'
 
   --disable-pip-cache
-          Disables GitHub PIP cache during the build. Useful if github is not reachable during build.
+          Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build.
 
-  --add-local-pip-wheels
+  --install-from-local-files-when-building
           This flag is used during image building. If it is used in addition to installing
-          Airflow from PyPI, the packages are installed from the .whl packages placed
+          Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed
           in the 'docker-context-files' folder. The same flag can be used when entering
-          the CI image - in this case also the .whl files
+          the CI image - in this case also the .whl and .tar.gz files will be installed automatically.
 
   -C, --force-clean-images
           Force build images with cache disabled. This will remove the pulled or build images
@@ -1758,10 +1763,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --image-tag TAG
           Additional tag in the image.
 
-  --skip-installing-airflow-via-pip
-          Skips installing Airflow via PIP. If you use this flag and want to install
-          Airflow, you have to install Airflow from packages placed in
-          'docker-context-files' and use --add-local-pip-files flag.
+  --disable-pypi-when-building
+          Disable installing Airflow from PyPI when building. If you use this flag and want
+          to install Airflow, you have to install it from packages placed in
+          'docker-context-files' and use the --install-from-local-files-when-building flag.
 
   --additional-extras ADDITIONAL_EXTRAS
           Additional extras to pass to build images The default is no additional extras.
@@ -1814,13 +1819,13 @@ This is the current syntax for  `./breeze <./breeze>`_:
           in the form of '/docker-context-files/<NAME_OF_THE_FILE>'
 
   --disable-pip-cache
-          Disables GitHub PIP cache during the build. Useful if github is not reachable during build.
+          Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build.
 
-  --add-local-pip-wheels
+  --install-from-local-files-when-building
           This flag is used during image building. If it is used in addition to installing
-          Airflow from PyPI, the packages are installed from the .whl packages placed
+          Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed
           in the 'docker-context-files' folder. The same flag can be used when entering
-          the CI image - in this case also the .whl files
+          the CI image - in this case also the .whl and .tar.gz files will be installed automatically.
 
   -C, --force-clean-images
           Force build images with cache disabled. This will remove the pulled or build images
@@ -2101,7 +2106,12 @@ This is the current syntax for  `./breeze <./breeze>`_:
           image building time in production image and at container entering time for CI image. One of:
 
                  1.10.13 1.10.12 1.10.11 1.10.10 1.10.9 1.10.8 1.10.7 1.10.6 1.10.5 1.10.4 1.10.3
-                 1.10.2 wheel
+                 1.10.2 wheel none
+
+          When 'none' is used, you can install airflow from local packages. When building an image, the
+          airflow package should be added to 'docker-context-files' and the
+          --install-from-docker-context-files flag should be used. When running an image, the airflow
+          package should be added to the dist folder and the --install-packages-from-dist flag should be used.
 
   -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
           If specified, installs Airflow directly from reference in GitHub. This happens at
@@ -2111,10 +2121,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --no-rbac-ui
           Disables RBAC UI when Airflow 1.10.* is installed.
 
-  --install-wheels
-          If specified it will look for wheel packages placed in dist folder and it will install the
-          wheels from there after installing Airflow. This is useful for testing backport
-          packages as well as in the future for testing provider packages for 2.0.
+  --install-packages-from-dist
+          If specified, it will look for packages placed in the dist folder and will install those
+          packages after installing Airflow. This is useful for testing provider
+          packages.
 
   ****************************************************************************************************
    Credentials
@@ -2151,10 +2161,10 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --image-tag TAG
           Additional tag in the image.
 
-  --skip-installing-airflow-via-pip
-          Skips installing Airflow via PIP. If you use this flag and want to install
-          Airflow, you have to install Airflow from packages placed in
-          'docker-context-files' and use --add-local-pip-files flag.
+  --disable-pypi-when-building
+          Disable installing Airflow from PyPI when building. If you use this flag and want
+          to install Airflow, you have to install it from packages placed in
+          'docker-context-files' and use the --install-from-local-files-when-building flag.
 
   --additional-extras ADDITIONAL_EXTRAS
           Additional extras to pass to build images The default is no additional extras.
@@ -2207,13 +2217,13 @@ This is the current syntax for  `./breeze <./breeze>`_:
           in the form of '/docker-context-files/<NAME_OF_THE_FILE>'
 
   --disable-pip-cache
-          Disables GitHub PIP cache during the build. Useful if github is not reachable during build.
+          Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build.
 
-  --add-local-pip-wheels
+  --install-from-local-files-when-building
           This flag is used during image building. If it is used in addition to installing
-          Airflow from PyPI, the packages are installed from the .whl packages placed
+          Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed
           in the 'docker-context-files' folder. The same flag can be used when entering
-          the CI image - in this case also the .whl files
+          the CI image - in this case also the .whl and .tar.gz files will be installed automatically.
 
   -C, --force-clean-images
           Force build images with cache disabled. This will remove the pulled or build images
@@ -2292,6 +2302,17 @@ This is the current syntax for  `./breeze <./breeze>`_:
           Default: All
 
   ****************************************************************************************************
+   Flags for generation of the packages
+
+  -S, --version-suffix-for-pypi SUFFIX
+          Adds an optional suffix to the version in the generated backport package. It can be used
+          to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.
+
+  -N, --version-suffix-for-svn SUFFIX
+          Adds an optional suffix to the generated names of packages. It can be used to generate
+          rc1/rc2 ... versions of the packages to be uploaded to SVN.
+
+  ****************************************************************************************************
    Increase verbosity of the scripts
 
   -v, --verbose
diff --git a/CI.rst b/CI.rst
index fac9f0f..e087438 100644
--- a/CI.rst
+++ b/CI.rst
@@ -730,15 +730,15 @@ The image names follow the patterns:
 |              |                            | <COMMIT_SHA>                   | It contains only compiled libraries and minimal set of dependencies to run Airflow.        |
 +--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
 
-* <BRANCH> might be either "master" or "v1-10-test"
-* <X.Y> - Python version (Major + Minor). For "master" it should be in ["3.6", "3.7", "3.8"]. For
+* <BRANCH> might be either "master" or "v1-10-test" or "v2-0-test"
+* <X.Y> - Python version (Major + Minor). For "master" and "v2-0-test" it should be in ["3.6", "3.7", "3.8"]. For
  v1-10-test it should be in ["2.7", "3.5", "3.6", "3.7", "3.8"].
 * <RUN_ID> - GitHub Actions RUN_ID. You can get it from CI action job outputs (run id is printed in
   logs and displayed as part of the step name. All PRs belong to some RUN_ID and this way you can
   pull the very exact version of image used in that RUN_ID
-* <COMMIT_SHA> - for images that get merged to "master" of "v1-10-test" the images are also tagged
+* <COMMIT_SHA> - for images that get merged to "master", "v2-0-test" or "v1-10-test" the images are also tagged
   with the commit SHA of that particular commit. This way you can easily find the image that was used
-  for testing for that "master" or "v1-10-test" test run.
+  for testing for that "master", "v2-0-test" or "v1-10-test" test run.
 
 Reproducing CI Runs locally
 ===========================
diff --git a/breeze b/breeze
index d99ba02..e9a9557 100755
--- a/breeze
+++ b/breeze
@@ -57,7 +57,6 @@ export EXTRA_STATIC_CHECK_OPTIONS
 #    MAX_SCREEN_WIDTH
 #    SCREEN_WIDTH
 #    MOUNT_LOCAL_SOURCES
-#    MOUNT_FILES
 #    FORCE_PULL_IMAGES
 #    ENABLE_KIND_CLUSTER
 #    FORWARD_CREDENTIALS
@@ -98,9 +97,6 @@ function breeze::setup_default_breeze_constants() {
     # By default we mount local Airflow sources
     export MOUNT_LOCAL_SOURCES="true"
 
-    # By default we mount files folder
-    export MOUNT_FILES="true"
-
     # By default we only pull images if we do not have them locally.
     # This can be overridden by '--force-pull-images' flag
     export FORCE_PULL_IMAGES="false"
@@ -140,7 +136,7 @@ function breeze::setup_default_breeze_constants() {
 
     # if set to true, the ci image will look for wheel packages in dist folder and will install them
     # during entering the container
-    export INSTALL_WHEELS="false"
+    export INSTALL_PACKAGES_FROM_DIST="false"
 
     # Determines whether to force build without checking if it is needed
     # Can be overridden by '--force-build-images' flag.
@@ -638,19 +634,14 @@ function breeze::prepare_command_files() {
     local remove_sources_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml
     local forward_credentials_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml
 
-    local compose_ci_file=${main_ci_docker_compose_file}:${backend_docker_compose_file}
-    local compose_prod_file=${main_prod_docker_compose_file}:${backend_docker_compose_file}
+    local compose_ci_file=${main_ci_docker_compose_file}:${backend_docker_compose_file}:${files_docker_compose_file}
+    local compose_prod_file=${main_prod_docker_compose_file}:${backend_docker_compose_file}:${files_docker_compose_file}
 
     if [[ "${MOUNT_LOCAL_SOURCES}" != "false" ]]; then
         compose_ci_file=${compose_ci_file}:${local_docker_compose_file}
         compose_prod_file=${compose_prod_file}:${local_prod_docker_compose_file}
     fi
 
-    if [[ "${MOUNT_FILES}" != "false" ]]; then
-        compose_ci_file=${compose_ci_file}:${files_docker_compose_file}
-        compose_prod_file=${compose_prod_file}:${files_docker_compose_file}
-    fi
-
     if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
         compose_ci_file=${compose_ci_file}:${forward_credentials_docker_compose_file}
         compose_prod_file=${compose_prod_file}:${forward_credentials_docker_compose_file}
@@ -940,10 +931,10 @@ function breeze::parse_arguments() {
             echo
             shift
             ;;
-        --skip-installing-airflow-via-pip)
-            export INSTALL_AIRFLOW_VIA_PIP="false"
+        --disable-pypi-when-building)
+            export INSTALL_FROM_PYPI="false"
             export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
-            echo "Skip installing airflow via PIP"
+            echo "Disable installing airflow from PYPI"
             shift
             ;;
         -E | --extras)
@@ -1027,8 +1018,8 @@ function breeze::parse_arguments() {
             export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
             shift
             ;;
-        --add-local-pip-wheels)
-            export AIRFLOW_LOCAL_PIP_WHEELS="true"
+        --install-from-local-files-when-building)
+            export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
             echo "Install wheels from local docker-context-files when building image"
             shift
             ;;
@@ -1057,7 +1048,7 @@ function breeze::parse_arguments() {
             ;;
         -c | --github-registry)
             echo
-            echo "Use github registry"
+            echo "Use GitHub registry"
             echo
             export USE_GITHUB_REGISTRY="true"
             shift
@@ -1066,7 +1057,7 @@ function breeze::parse_arguments() {
             echo
             echo "GitHub repository: ${2}"
             echo
-            echo "Using github registry."
+            echo "Using GitHub registry."
             echo
             export GITHUB_REPOSITORY="${2}"
             export USE_GITHUB_REGISTRY="true"
@@ -1076,9 +1067,10 @@ function breeze::parse_arguments() {
             echo
             echo "GitHub image id: ${2}"
             echo
-            echo "Force pulling the image, using github registry and skip mounting local sources."
+            echo "Force pulling the image, using GitHub registry and skip mounting local sources."
             echo "This is in order to get the exact same version as used in CI environment for SHA/RUN_ID!."
-            echo "You can specify --skip-mounting-local-sources to not mount local sources. "
+            echo "You can specify --skip-mounting-local-sources to not mount local sources to get exact. "
+            echo "behaviour as in the CI environment."
             echo
             export FORCE_PULL_IMAGES="true"
             export USE_GITHUB_REGISTRY="true"
@@ -1096,6 +1088,18 @@ function breeze::parse_arguments() {
             echo
             shift 2
             ;;
+        -S | --version-suffix-for-pypi)
+            export VERSION_SUFFIX_FOR_PYPI="${2}"
+            echo "Version suffix for PyPI ${VERSION_SUFFIX_FOR_PYPI}"
+            echo
+            shift 2
+            ;;
+        -N | --version-suffix-for-svn)
+            export VERSION_SUFFIX_FOR_SVN="${2}"
+            echo "Version suffix for SVN ${VERSION_SUFFIX_FOR_SVN}"
+            echo
+            shift 2
+            ;;
         --load-example-dags)
             export LOAD_EXAMPLES="true"
             echo "Include Airflow sample dags"
@@ -1120,12 +1124,18 @@ function breeze::parse_arguments() {
             echo
             shift
             ;;
-        --install-wheels)
-            export INSTALL_WHEELS="true"
-            echo "Install wheels found in dist folder during entering breeze."
+        --install-packages-from-dist)
+            export INSTALL_PACKAGES_FROM_DIST="true"
+            echo "Install packages found in dist folder when entering breeze."
             echo
             shift
             ;;
+        --package-format)
+            export PACKAGE_FORMAT="${2}"
+            echo "Selected package type: ${PACKAGE_FORMAT}"
+            echo
+            shift 2
+            ;;
         --test-type)
             export TEST_TYPE="${2}"
             echo "Selected test type: ${TEST_TYPE}"
@@ -1201,6 +1211,12 @@ function breeze::parse_arguments() {
             export UPGRADE_TO_LATEST_CONSTRAINTS="true"
             shift
             ;;
+        prepare-airflow-packages)
+            last_subcommand="${1}"
+            command_to_run="perform_prepare_airflow_packages"
+            export INSTALL_PROVIDERS_FROM_SOURCES="false"
+            shift
+            ;;
         push-image)
             last_subcommand="${1}"
             command_to_run="perform_push_image"
@@ -1432,9 +1448,13 @@ function breeze::prepare_formatted_versions() {
     readonly FORMATTED_DEFAULT_PROD_EXTRAS
 
     FORMATTED_TEST_TYPES=$(echo "${_breeze_allowed_test_types=""}" |
-        tr ',' ' ' | fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
+        fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
     readonly FORMATTED_TEST_TYPES
 
+    FORMATTED_PACKAGE_FORMATS=$(echo "${_breeze_allowed_package_formats=""}" |
+        fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
+    readonly FORMATTED_PACKAGE_FORMATS
+
 }
 
 #######################################################################################################
@@ -1470,6 +1490,8 @@ function breeze::prepare_usage() {
     readonly USAGE_GENERATE_CONSTRAINTS
     export USAGE_INITIALIZE_LOCAL_VIRTUALENV="Initializes local virtualenv"
     readonly USAGE_INITIALIZE_LOCAL_VIRTUALENV
+    export USAGE_PREPARE_AIRFLOW_PACKAGES="Prepares airflow packages"
+    readonly USAGE_PREPARE_AIRFLOW_PACKAGES
     export USAGE_PUSH_IMAGE="Pushes images to registry"
     readonly USAGE_PUSH_IMAGE
     export USAGE_KIND_CLUSTER="Manages KinD cluster on the host"
@@ -1641,6 +1663,23 @@ Flags:
 $(breeze::flag_airflow_variants)
 "
     readonly DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV
+    export DETAILED_USAGE_PREPARE_AIRFLOW_PACKAGES="
+${CMDNAME} prepare-airflow-packages [FLAGS]
+
+      Prepares airflow packages (sdist and wheel) in the dist folder. Note that the
+      prepare-provider-packages command cleans up the dist folder, so if you also want
+      to generate provider packages, make sure you run prepare-provider-packages first
+      and prepare-airflow-packages second.
+
+      General form:
+
+      '${CMDNAME} prepare-airflow-packages'
+
+Flags:
+$(breeze::flag_packages)
+$(breeze::flag_verbosity)
+"
+    readonly DETAILED_USAGE_PREPARE_AIRFLOW_PACKAGES
     export DETAILED_USAGE_PUSH_IMAGE="
 ${CMDNAME} push_image [FLAGS]
 
@@ -2124,6 +2163,11 @@ function breeze::flag_choose_different_airflow_version() {
 
 ${FORMATTED_INSTALL_AIRFLOW_VERSIONS}
 
+        When 'none' is used, you can install airflow from local packages. When building an image, the
+        airflow package should be added to 'docker-context-files' and the
+        --install-from-docker-context-files flag should be used. When running an image, the airflow
+        package should be added to the dist folder and the --install-packages-from-dist flag should be used.
+
 -t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
         If specified, installs Airflow directly from reference in GitHub. This happens at
         image building time in production image and at container entering time for CI image.
@@ -2132,10 +2176,10 @@ ${FORMATTED_INSTALL_AIRFLOW_VERSIONS}
 --no-rbac-ui
         Disables RBAC UI when Airflow 1.10.* is installed.
 
---install-wheels
-        If specified it will look for wheel packages placed in dist folder and it will install the
-        wheels from there after installing Airflow. This is useful for testing backport
-        packages as well as in the future for testing provider packages for 2.0.
+--install-packages-from-dist
+        If specified, it will look for packages placed in the dist folder and will install those
+        packages after installing Airflow. This is useful for testing provider
+        packages.
 "
 }
 
@@ -2176,6 +2220,29 @@ function breeze::flag_credentials() {
 
 #######################################################################################################
 #
+# Prints flags that control package preparation
+#
+# Outputs:
+#    Flag information.
+#######################################################################################################
+function breeze::flag_packages() {
+    echo "
+--package-format PACKAGE_FORMAT
+
+        Chooses the format of packages to prepare.
+
+        One of:
+
+${FORMATTED_PACKAGE_FORMATS}
+
+        Default: ${_breeze_default_package_formats:=}
+
+"
+}
+
+
+#######################################################################################################
+#
 # Prints flags that control verbosity
 #
 # Outputs:
@@ -2244,10 +2311,10 @@ ${FORMATTED_DEFAULT_PROD_EXTRAS}
 --image-tag TAG
         Additional tag in the image.
 
---skip-installing-airflow-via-pip
-        Skips installing Airflow via PIP. If you use this flag and want to install
-        Airflow, you have to install Airflow from packages placed in
-        'docker-context-files' and use --add-local-pip-files flag.
+--disable-pypi-when-building
+        Disable installing Airflow from PyPI when building. If you use this flag and want
+        to install Airflow, you have to install it from packages placed in
+        'docker-context-files' and use the --install-from-local-files-when-building flag.
 
 --additional-extras ADDITIONAL_EXTRAS
         Additional extras to pass to build images The default is no additional extras.
@@ -2300,13 +2367,13 @@ Build options:
         in the form of '/docker-context-files/<NAME_OF_THE_FILE>'
 
 --disable-pip-cache
-        Disables GitHub PIP cache during the build. Useful if github is not reachable during build.
+        Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build.
 
---add-local-pip-wheels
+--install-from-local-files-when-building
         This flag is used during image building. If it is used in addition to installing
-        Airflow from PyPI, the packages are installed from the .whl packages placed
+        Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed
         in the 'docker-context-files' folder. The same flag can be used when entering
-        the CI image - in this case also the .whl files
+        the CI image - in this case also the .whl and .tar.gz files will be installed automatically.
 
 -C, --force-clean-images
         Force build images with cache disabled. This will remove the pulled or build images
@@ -2392,6 +2459,24 @@ function breeze::flag_pull_push_docker_images() {
 "
 }
 
+#######################################################################################################
+#
+# Prints flags that control version of generated packages
+#
+# Outputs:
+#    Flag information.
+#######################################################################################################
+function breeze::flag_version_suffix() {
+    echo "
+-S, --version-suffix-for-pypi SUFFIX
+        Adds an optional suffix to the version in the generated backport package. It can be used
+        to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.
+
+-N, --version-suffix-for-svn SUFFIX
+        Adds an optional suffix to the generated names of packages. It can be used to generate
+        rc1/rc2 ... versions of the packages to be uploaded to SVN.
+"
+}
 #####################################################################################################
 #
 # Prints flags that control how Airflow should be populated with the command start-airflow
@@ -2406,7 +2491,6 @@ function breeze::flag_start_airflow() {
 
 --load-default-connections
         Include Airflow Default Connections.
-
 "
 }
 
@@ -2518,6 +2602,10 @@ $(breeze::print_star_line)
 $(breeze::flag_tests)
 
 $(breeze::print_star_line)
+ Flags for generation of the packages
+$(breeze::flag_version_suffix)
+
+$(breeze::print_star_line)
  Increase verbosity of the scripts
 $(breeze::flag_verbosity)
 
@@ -2659,6 +2747,7 @@ function breeze::check_and_save_all_params() {
     parameters::check_and_save_allowed_param "MYSQL_VERSION" "Mysql version" "--mysql-version"
 
     parameters::check_allowed_param TEST_TYPE "Type of tests" "--test-type"
+    parameters::check_allowed_param PACKAGE_FORMAT "Format of packages to build" "--package-format"
 
 
     # Can't verify those - they can be anything, so let's just save them
@@ -2862,7 +2951,8 @@ function breeze::run_build_command() {
             build_images::rebuild_ci_image_if_needed
         fi
         ;;
-    build_docs | perform_static_checks | perform_generate_constraints )
+    build_docs | perform_static_checks | perform_generate_constraints | \
+        perform_prepare_airflow_packages)
         build_images::prepare_ci_build
         build_images::rebuild_ci_image_if_needed
         ;;
@@ -2891,8 +2981,9 @@ function breeze::run_build_command() {
             build_images::prepare_ci_build
         fi
         ;;
-    perform_initialize_local_virtualenv | perform_setup_autocomplete | toggle_suppress_cheatsheet | toggle_suppress_asciiart ) ;;
-
+    perform_initialize_local_virtualenv | perform_setup_autocomplete | \
+        toggle_suppress_cheatsheet | toggle_suppress_asciiart )
+        ;;
     manage_kind_cluster)
         if [[ ${KIND_CLUSTER_OPERATION} == "start" ]]; then
             echo "Starts KinD cluster"
@@ -3014,6 +3105,9 @@ function breeze::run_breeze_command() {
     perform_generate_constraints)
         runs::run_generate_constraints
         ;;
+    perform_prepare_airflow_packages)
+        build_airflow_packages::build_airflow_packages
+        ;;
     perform_push_image)
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
             push_pull_remove_images::push_prod_images
diff --git a/breeze-complete b/breeze-complete
index 505c6bc..df81093 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -62,6 +62,7 @@ _breeze_allowed_install_airflow_versions=$(cat <<-EOF
 1.10.3
 1.10.2
 wheel
+none
 EOF
 )
 
diff --git a/scripts/ci/constraints/ci_branch_constraints.sh b/scripts/ci/constraints/ci_branch_constraints.sh
index 4cc7227..6be2d84 100755
--- a/scripts/ci/constraints/ci_branch_constraints.sh
+++ b/scripts/ci/constraints/ci_branch_constraints.sh
@@ -22,6 +22,8 @@ if [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then
   echo "::set-output name=branch::constraints-master"
 elif [[ ${GITHUB_REF} == 'refs/heads/v1-10-test' ]]; then
   echo "::set-output name=branch::constraints-1-10"
+elif [[ ${GITHUB_REF} == 'refs/heads/v2-0-test' ]]; then
+  echo "::set-output name=branch::constraints-2-0"
 else
   echo
   echo "Unexpected ref ${GITHUB_REF}. Exiting!"

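And a sketch of the new package-preparation flow from the breeze help above
(the rc1 suffix is an example; 'wheel' as a --package-format value is an
assumption, since the allowed values are not shown in this hunk):

  ./breeze prepare-airflow-packages --package-format wheel --version-suffix-for-pypi rc1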

[airflow] 21/44: Install airflow and providers from dist and verify them (#13033)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0c8fe86bf99be74d700088cee3fe3044bb12e03c
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sat Dec 12 19:38:30 2020 +0100

    Install airflow and providers from dist and verify them (#13033)
    
    * Install airflow and providers from dist and verify them
    
    This check is there to prevent problems similar to those reported
    in #13027 and fixed in #13031.
    
    Previously we always built airflow from wheels; only providers were
    installed from sdist packages and tested. In this version both
    airflow and providers are installed using the same package format
    (sdist or wheel).
    
    * Update scripts/in_container/entrypoint_ci.sh
    
    Co-authored-by: Kaxil Naik <ka...@gmail.com>
    
    Co-authored-by: Kaxil Naik <ka...@gmail.com>
    (cherry picked from commit abf2a4264b18e750dbc2eb384a86d08d821dfba4)
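
    A sketch of the image verification flow this enables on CI (script
    paths as they appear in the ci.yml changes below):

        ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
        ./scripts/ci/images/ci_verify_prod_image.sh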
---
 .github/workflows/ci.yml                    |  8 ++-
 BREEZE.rst                                  |  4 +-
 Dockerfile                                  |  2 +-
 breeze-complete                             |  1 +
 docs/production-deployment.rst              |  1 +
 scripts/ci/images/ci_verify_ci_image.sh     |  2 +
 scripts/ci/images/ci_verify_prod_image.sh   |  6 +--
 scripts/ci/libraries/_build_images.sh       | 71 +++++++++++++++---------
 scripts/in_container/_in_container_utils.sh | 19 ++++++-
 scripts/in_container/entrypoint_ci.sh       | 83 +++++++++++++++++++++--------
 10 files changed, 139 insertions(+), 58 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 74670d3..bb06e0d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -28,7 +28,6 @@ on:  # yamllint disable-line rule:truthy
 env:
 
   MOUNT_LOCAL_SOURCES: "false"
-  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
@@ -42,6 +41,7 @@ env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ github.run_id }}"
   GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
+  INSTALL_PROVIDERS_FROM_SOURCES: "true"
 
   # You can switch between building the image in "Build Images" workflow or building them in CI workflow
   # Separately for each job.
@@ -195,6 +195,8 @@ jobs:
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
         if: needs.build-info.outputs.waitForImage == 'true'
+      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
+        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Verify CI image Py${{matrix.python-version}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_verify_ci_image.sh
         if: needs.build-info.outputs.waitForImage == 'true'
@@ -451,7 +453,7 @@ jobs:
           retention-days: 7
 
   tests-sqlite:
-    timeout-minutes: 60
+    timeout-minutes: 80
     name: >
       Sqlite Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
     runs-on: ubuntu-20.04
@@ -649,6 +651,8 @@ jobs:
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
         if: needs.build-info.outputs.waitForImage == 'true'
+      - name: "Prepare PROD Image"
+        run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
       - name: "Verify PROD image Py${{matrix.python-version}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_verify_prod_image.sh
         if: needs.build-info.outputs.waitForImage == 'true'
diff --git a/BREEZE.rst b/BREEZE.rst
index 633fb4d..03290c3 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1254,7 +1254,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
@@ -2209,7 +2209,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
           If specified, installs Airflow directly from PIP released version. This happens at
           image building time in production image and at container entering time for CI image. One of:
 
-                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
diff --git a/Dockerfile b/Dockerfile
index eecc683..a34b63e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -255,7 +255,7 @@ RUN if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then \
         pip install --user ${ADDITIONAL_PYTHON_DEPS} --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"; \
     fi; \
     if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
-        if ls /docker-context-files/*.whl 1> /dev/null 2>&1; then \
+        if [ -n "$(ls /docker-context-files/*.whl /docker-context-files/*.tar.gz 2>/dev/null)" ]; then \
             pip install --user --no-deps /docker-context-files/*.{whl,tar.gz}; \
         fi ; \
     fi; \
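
A subtlety behind this change: ls exits non-zero if any of its glob arguments
fails to match, so a bare "if ls ..." guard skips installation whenever only
one of the two package formats is present. A quick illustration in a scratch
directory (paths are hypothetical):

    $ mkdir -p /tmp/ctx && touch /tmp/ctx/pkg.tar.gz
    $ ls /tmp/ctx/*.whl /tmp/ctx/*.tar.gz 1>/dev/null 2>&1; echo $?
    2
    $ [ -n "$(ls /tmp/ctx/*.whl /tmp/ctx/*.tar.gz 2>/dev/null)" ] && echo "packages found"
    packages found

hence the command-substitution form used in the guard above.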
diff --git a/breeze-complete b/breeze-complete
index 819938b..cdf8fe9 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -58,6 +58,7 @@ _breeze_allowed_install_airflow_versions=$(cat <<-EOF
 1.10.9
 none
 wheel
+sdist
 EOF
 )
 
diff --git a/docs/production-deployment.rst b/docs/production-deployment.rst
index b0ffa02..22059ec 100644
--- a/docs/production-deployment.rst
+++ b/docs/production-deployment.rst
@@ -525,6 +525,7 @@ production image. There are three types of build:
 |                                   |                        | GitHub repository tag or branch or "." to install from sources.                   |
 |                                   |                        | Note that installing from local sources requires appropriate values of the        |
 |                                   |                        | ``AIRFLOW_SOURCES_FROM`` and ``AIRFLOW_SOURCES_TO`` variables as described below. |
+|                                   |                        | Only used when ``INSTALL_FROM_PYPI`` is set to ``true``.                          |
 +-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
| ``AIRFLOW_INSTALL_VERSION``       |                        | Optional - might be used for package installation of a different Airflow version  |
|                                   |                        | for example "==1.10.14". For consistency, you should also set ``AIRFLOW_VERSION`` |
diff --git a/scripts/ci/images/ci_verify_ci_image.sh b/scripts/ci/images/ci_verify_ci_image.sh
index 004eac0..831fd28 100755
--- a/scripts/ci/images/ci_verify_ci_image.sh
+++ b/scripts/ci/images/ci_verify_ci_image.sh
@@ -52,4 +52,6 @@ function pull_ci_image() {
 
 build_images::prepare_ci_build
 
+pull_ci_image
+
 verify_ci_image_dependencies
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh
index 274261b..30f9def 100755
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/images/ci_verify_prod_image.sh
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 # shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
 
 function verify_prod_image_has_airflow {
     echo
@@ -52,7 +52,6 @@ function verify_prod_image_has_airflow {
     fi
 }
 
-
 function verify_prod_image_dependencies {
 
     echo
@@ -70,7 +69,7 @@ function verify_prod_image_dependencies {
         # exit ${res}
     else
         echo
-        echo " \e[32mOK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.  ${COLOR_RESET}"
+        echo "${COLOR_GREEN_OK} The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.  ${COLOR_RESET}"
         echo
     fi
     set -e
@@ -89,6 +88,7 @@ function pull_prod_image() {
 
 build_images::prepare_prod_build
 
+pull_prod_image
 
 verify_prod_image_has_airflow
 
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 296124f..d0e0213 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -117,10 +117,10 @@ function build_images::forget_last_answer() {
 }
 
 function build_images::confirm_via_terminal() {
-    echo > "${DETECTED_TERMINAL}"
-    echo > "${DETECTED_TERMINAL}"
-    echo "Make sure that you rebased to latest master before rebuilding!" > "${DETECTED_TERMINAL}"
-    echo > "${DETECTED_TERMINAL}"
+    echo >"${DETECTED_TERMINAL}"
+    echo >"${DETECTED_TERMINAL}"
+    echo "Make sure that you rebased to latest master before rebuilding!" >"${DETECTED_TERMINAL}"
+    echo >"${DETECTED_TERMINAL}"
     # Make sure to use output of tty rather than stdin/stdout when available - this way confirm
     # will also work for pre-commit hooks (git does not pass stdin/stdout to pre-commit hooks)
     # shellcheck disable=SC2094
@@ -217,6 +217,31 @@ function build_images::confirm_image_rebuild() {
     fi
 }
 
+function build_images::confirm_non-empty-docker-context-files() {
+    local num_docker_context_files
+    num_docker_context_files=$(find "${AIRFLOW_SOURCES}/docker-context-files/" -type f | \
+        grep -cv "README.md")
+    if [[ ${num_docker_context_files} == "0" ]]; then
+        if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then
+            >&2 echo
+            >&2 echo "ERROR! You want to install packages from docker-context-files"
+            >&2 echo "       but there are no packages to install in this folder."
+            >&2 echo
+            exit 1
+        fi
+    else
+        if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "false" ]]; then
+            >&2 echo
+            >&2 echo "ERROR! There are some extra files in docker-context-files except README.md"
+            >&2 echo "       And you did not choose --install-from-docker-context-files flag"
+            >&2 echo "       This might result in unnecessary cache invalidation and long build times"
+            >&2 echo "       Exiting now - please remove those files (except README.md) and retry"
+            >&2 echo
+            exit 2
+        fi
+    fi
+}
+
 # Builds local image manifest
 # It contains only one .json file - result of docker inspect - describing the image
 # We cannot use docker registry APIs as they are available only with authorisation
@@ -251,8 +276,8 @@ function build_images::get_local_build_cache_hash() {
         return
     fi
     docker cp "local-airflow-ci-container:/build-cache-hash" \
-        "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" 2> /dev/null \
-        || touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}"
+        "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" 2>/dev/null ||
+        touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}"
     set -e
     verbosity::print_info
     verbosity::print_info "Local build cache hash: '$(cat "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}")'"
@@ -305,8 +330,8 @@ function build_images::compare_local_and_remote_build_cache_hash() {
     local local_hash
     local_hash=$(cat "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}")
 
-    if [[ ${remote_hash} != "${local_hash}" ||
-        ${local_hash} == "" ]]; then
+    if [[ ${remote_hash} != "${local_hash}" || ${local_hash} == "" ]]; then
         echo
         echo
         echo "Your image and the dockerhub have different or missing build cache hashes."
@@ -370,7 +395,7 @@ function build_images::get_docker_image_names() {
     export BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"
 
     # GitHub Registry names must be lowercase :(
-    github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" |tr '[:upper:]' '[:lower:]')"
+    github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]')"
     export GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE="${GITHUB_REGISTRY}/${github_repository_lowercase}/${AIRFLOW_PROD_BASE_TAG}"
     export GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE="${GITHUB_REGISTRY}/${github_repository_lowercase}/${AIRFLOW_PROD_BASE_TAG}-build"
     export GITHUB_REGISTRY_PYTHON_BASE_IMAGE="${GITHUB_REGISTRY}/${github_repository_lowercase}/python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
@@ -380,7 +405,7 @@ function build_images::get_docker_image_names() {
 }
 
 # If GitHub Registry is used, login to the registry using GITHUB_USERNAME and GITHUB_TOKEN
-function build_image::login_to_github_registry_if_needed()  {
+function build_image::login_to_github_registry_if_needed() {
     if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
         if [[ -n ${GITHUB_TOKEN=} ]]; then
             echo "${GITHUB_TOKEN}" | docker login \
@@ -454,9 +479,8 @@ function build_images::rebuild_ci_image_if_needed() {
             echo "Checking if the remote image needs to be pulled"
             echo
             build_images::get_remote_image_build_cache_hash
-            if [[ ${REMOTE_DOCKER_REGISTRY_UNREACHABLE:=} != "true" && \
-                  ${LOCAL_MANIFEST_IMAGE_UNAVAILABLE:=} != "true" ]]; then
-                    build_images::compare_local_and_remote_build_cache_hash
+            if [[ ${REMOTE_DOCKER_REGISTRY_UNREACHABLE:=} != "true" && ${LOCAL_MANIFEST_IMAGE_UNAVAILABLE:=} != "true" ]]; then
+                build_images::compare_local_and_remote_build_cache_hash
             else
                 FORCE_PULL_IMAGES="true"
             fi
@@ -572,7 +596,7 @@ function build_images::build_ci_image() {
         )
     fi
 
-    if [[ -n ${SPIN_PID:=""} ]]; then
+    if [[ -n ${SPIN_PID=} ]]; then
         kill -HUP "${SPIN_PID}" || true
         wait "${SPIN_PID}" || true
         echo >"${DETECTED_TERMINAL}"
@@ -624,8 +648,8 @@ Docker building ${AIRFLOW_CI_IMAGE}.
         --build-arg ADDITIONAL_RUNTIME_APT_COMMAND="${ADDITIONAL_RUNTIME_APT_COMMAND}" \
         --build-arg ADDITIONAL_RUNTIME_APT_DEPS="${ADDITIONAL_RUNTIME_APT_DEPS}" \
         --build-arg ADDITIONAL_RUNTIME_APT_ENV="${ADDITIONAL_RUNTIME_APT_ENV}" \
-        --build-arg INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP}" \
-        --build-arg AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS}" \
+        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
+        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg UPGRADE_TO_LATEST_CONSTRAINTS="${UPGRADE_TO_LATEST_CONSTRAINTS}" \
         --build-arg BUILD_ID="${CI_BUILD_ID}" \
         --build-arg COMMIT_SHA="${COMMIT_SHA}" \
@@ -655,7 +679,7 @@ Docker building ${AIRFLOW_CI_IMAGE}.
 # DockerHub user etc. the variables are set so that other functions can use those variables.
 function build_images::prepare_prod_build() {
     if [[ -n "${INSTALL_AIRFLOW_REFERENCE=}" ]]; then
-        # When --install-airflow-reference is used then the image is build from github tag
+        # When --install-airflow-reference is used then the image is built from a GitHub tag
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
             "--build-arg" "AIRFLOW_INSTALLATION_METHOD=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
         )
@@ -758,8 +782,8 @@ function build_images::build_prod_images() {
         --build-arg ADDITIONAL_DEV_APT_DEPS="${ADDITIONAL_DEV_APT_DEPS}" \
         --build-arg ADDITIONAL_DEV_APT_ENV="${ADDITIONAL_DEV_APT_ENV}" \
         --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="${AIRFLOW_PRE_CACHED_PIP_PACKAGES}" \
-        --build-arg INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP}" \
-        --build-arg AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS}" \
+        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
+        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg BUILD_ID="${CI_BUILD_ID}" \
         --build-arg COMMIT_SHA="${COMMIT_SHA}" \
         "${DOCKER_CACHE_PROD_BUILD_DIRECTIVE[@]}" \
@@ -787,8 +811,8 @@ function build_images::build_prod_images() {
         --build-arg ADDITIONAL_RUNTIME_APT_DEPS="${ADDITIONAL_RUNTIME_APT_DEPS}" \
         --build-arg ADDITIONAL_RUNTIME_APT_ENV="${ADDITIONAL_RUNTIME_APT_ENV}" \
         --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="${AIRFLOW_PRE_CACHED_PIP_PACKAGES}" \
-        --build-arg INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP}" \
-        --build-arg AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS}" \
+        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
+        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
         --build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
         --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
@@ -894,11 +918,6 @@ function build_images::build_prod_images_from_packages() {
     # Build apache airflow packages
     build_airflow_packages::build_airflow_packages
 
-    # Remove generated tar.gz packages
-    rm -f "${AIRFLOW_SOURCES}/dist/"apache-airflow*.tar.gz
-
-    # move the packages to docker-context-files folder
-    mkdir -pv "${AIRFLOW_SOURCES}/docker-context-files"
     mv "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/"
     build_images::build_prod_images
 }
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 19cacfe..e2410fd 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -215,7 +215,7 @@ function install_airflow_from_wheel() {
     local extras
     extras="${1}"
     local airflow_package
-    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache_airflow-*.whl')
+    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache_airflow-[0-9]*.whl')
     echo
     echo "Found package: ${airflow_package}. Installing."
     echo
@@ -228,6 +228,23 @@ function install_airflow_from_wheel() {
     pip install "${airflow_package}${1}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
 }
 
+function install_airflow_from_sdist() {
+    local extras
+    extras="${1}"
+    local airflow_package
+    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache-airflow-[0-9]*.tar.gz')
+    if [[ -z "${airflow_package}" ]]; then
+        >&2 echo
+        >&2 echo "ERROR! Could not find an airflow sdist package to install in /dist"
+        >&2 echo
+        exit 4
+    fi
+    echo
+    echo "Found package: ${airflow_package}. Installing."
+    echo
+    pip install "${airflow_package}${extras}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
 function install_remaining_dependencies() {
     pip install apache-beam[gcp] >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
 }
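
The new helper mirrors install_airflow_from_wheel and is meant to be called
from the CI entrypoint with the extras string of the caller's choice, e.g.
(inside the CI container, assuming /dist holds a previously built sdist):

    install_airflow_from_sdist "[all]"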
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 7ef1e7a..55962aa 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -72,6 +72,9 @@ else
 fi
 
 if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
+    echo
+    echo "Using already installed airflow version"
+    echo
     if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www_rbac/node_modules" ]]; then
         echo
         echo "Installing node modules as they are not yet installed (Sources mounted from Host)"
@@ -84,7 +87,7 @@ if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
     if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www_rbac/static/dist" ]]; then
         pushd "${AIRFLOW_SOURCES}/airflow/www_rbac/" &>/dev/null || exit 1
         echo
-        echo "Building production version of javascript files (Sources mounted from Host)"
+        echo "Building production version of JavaScript files (Sources mounted from Host)"
         echo
         echo
         yarn run prod
@@ -98,12 +101,64 @@ if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
     mkdir -p "${AIRFLOW_SOURCES}"/logs/
     mkdir -p "${AIRFLOW_SOURCES}"/tmp/
     export PYTHONPATH=${AIRFLOW_SOURCES}
+elif [[ ${INSTALL_AIRFLOW_VERSION} == "none"  ]]; then
+    echo
+    echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally"
+    echo
+    uninstall_airflow_and_providers
+elif [[ ${INSTALL_AIRFLOW_VERSION} == "wheel"  ]]; then
+    echo
+    echo "Install airflow from wheel package with [all] extras but uninstalling providers."
+    echo
+    uninstall_airflow_and_providers
+    install_airflow_from_wheel "[all]"
+    uninstall_providers
+elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist"  ]]; then
+    echo
+    echo "Install airflow from sdist package with [all] extras but uninstalling providers."
+    echo
+    uninstall_airflow_and_providers
+    install_airflow_from_sdist "[all]"
+    uninstall_providers
 else
-    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}"
+    echo
+    echo "Install airflow from PyPI including [all] extras"
+    echo
+    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[all]"
 fi
-
-if [[ ${INSTALL_WHEELS=} == "true" ]]; then
-  pip install /dist/*.whl || true
+if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then
+    echo
+    echo "Install all packages from dist folder"
+    if [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" ]]; then
+        echo "(except apache-airflow)"
+    fi
+    if [[ ${PACKAGE_FORMAT} == "both" ]]; then
+        echo
+        echo "${COLOR_RED_ERROR}You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
+        echo
+        exit 1
+    fi
+    echo
+    installable_files=()
+    for file in /dist/*.{whl,tar.gz}
+    do
+        [[ -e ${file} ]] || continue  # skip literal globs when nothing matched
+        if [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" && ${file} == /dist/apache?airflow-[0-9]* ]]; then
+            # Skip Apache Airflow package - it's just been installed above with extras
+            echo "Skipping ${file}"
+            continue
+        fi
+        if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *".whl" ]]; then
+            echo "Adding ${file} to install"
+            installable_files+=( "${file}" )
+        fi
+        if [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *".tar.gz" ]]; then
+            echo "Adding ${file} to install"
+            installable_files+=( "${file}" )
+        fi
+    done
+    if (( ${#installable_files[@]} )); then
+        pip install "${installable_files[@]}" --no-deps
+    fi
 fi
 
 export RUN_AIRFLOW_1_10=${RUN_AIRFLOW_1_10:="false"}
@@ -130,24 +185,6 @@ if [[ ${ENVIRONMENT_EXIT_CODE} != 0 ]]; then
     exit ${ENVIRONMENT_EXIT_CODE}
 fi
 
-
-if [[ ${INTEGRATION_KERBEROS:="false"} == "true" ]]; then
-    set +e
-    setup_kerberos
-    RES=$?
-    set -e
-
-    if [[ ${RES} != 0 ]]; then
-        echo
-        echo "ERROR !!!!Kerberos initialisation requested, but failed"
-        echo
-        echo "I will exit now, and you need to run 'breeze --integration kerberos restart'"
-        echo "to re-enter breeze and restart kerberos."
-        echo
-        exit 1
-    fi
-fi
-
 # Create symbolic link to fix possible issues with kubectl config cmd-path
 mkdir -p /usr/lib/google-cloud-sdk/bin
 touch /usr/lib/google-cloud-sdk/bin/gcloud
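
Taken together, the selection logic above is driven entirely by environment
variables. A run that installs airflow from the wheel in /dist and then the
provider wheels would be configured roughly like this (a sketch; the values
are illustrative):

    export INSTALL_AIRFLOW_VERSION="wheel"      # install airflow from the /dist wheel
    export INSTALL_PACKAGES_FROM_DIST="true"    # then install the remaining packages
    export PACKAGE_FORMAT="wheel"               # consider only *.whl files in /dist

The apache-airflow wheel itself is skipped in the loop because it has already
been installed with [all] extras earlier in the script.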


[airflow] 39/44: Allow webserver to read pod logs directly (#12598)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 753f79d11343815876546edf06775b7b24a72c0d
Author: Daniel Imberman <da...@gmail.com>
AuthorDate: Tue Nov 24 15:13:23 2020 -0800

    Allow webserver to read pod logs directly (#12598)
    
    * Allow webserver to read pod logs directly
    
    For users testing the KubernetesExecutor, this allows the webserver
    to read pod logs directly via the Kubernetes API. Note that these
    logs are only accessible while the worker pod is running.
    
    * fix tests
    
    (cherry picked from commit 9f28e416dbc6374dc9c7115304731a7bc0b4bfa9)
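
With the chart values below, enabling this explicitly is just a matter of
installing or upgrading the chart with RBAC on (a minimal sketch; the release
name and chart path are placeholders):

    helm upgrade --install airflow ./chart \
        --set rbacEnabled=true \
        --set webserver.allowPodLogReading=true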
---
 chart/templates/rbac/pod-log-reader-role.yaml      | 56 ++++++++++++++++++++++
 .../templates/rbac/pod-log-reader-rolebinding.yaml | 53 ++++++++++++++++++++
 chart/tests/test_basic_helm_chart.py               |  4 +-
 chart/values.schema.json                           |  4 ++
 chart/values.yaml                                  |  1 +
 5 files changed, 117 insertions(+), 1 deletion(-)

diff --git a/chart/templates/rbac/pod-log-reader-role.yaml b/chart/templates/rbac/pod-log-reader-role.yaml
new file mode 100644
index 0000000..72f5e35
--- /dev/null
+++ b/chart/templates/rbac/pod-log-reader-role.yaml
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+################################
+## Airflow Pod Reader Role
+#################################
+{{- if and .Values.rbacEnabled .Values.webserver.allowPodLogReading }}
+{{- if .Values.multiNamespaceMode }}
+kind: ClusterRole
+{{- else }}
+kind: Role
+{{- end }}
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+  name: {{ .Release.Name }}-pod-log-reader-role
+{{- if not .Values.multiNamespaceMode }}
+  namespace: {{ .Release.Namespace }}
+{{- end }}
+  labels:
+    tier: airflow
+    release: {{ .Release.Name }}
+    chart: "{{ .Chart.Name }}-{{ .Chart.Version }}"
+    heritage: {{ .Release.Service }}
+{{- with .Values.labels }}
+{{ toYaml . | indent 4 }}
+{{- end }}
+rules:
+  - apiGroups:
+      - ""
+    resources:
+      - "pods"
+    verbs:
+      - "list"
+      - "get"
+      - "watch"
+  - apiGroups:
+      - ""
+    resources:
+      - "pods/log"
+    verbs:
+      - "get"
+{{- end }}
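
Once this role is bound to the webserver service account (see the
rolebinding below), the grant can be sanity-checked with kubectl; the
namespace and release name here are placeholders:

    kubectl auth can-i get pods --subresource=log -n airflow \
        --as=system:serviceaccount:airflow:airflow-webserver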
diff --git a/chart/templates/rbac/pod-log-reader-rolebinding.yaml b/chart/templates/rbac/pod-log-reader-rolebinding.yaml
new file mode 100644
index 0000000..25371eb
--- /dev/null
+++ b/chart/templates/rbac/pod-log-reader-rolebinding.yaml
@@ -0,0 +1,53 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+################################
+## Airflow Pod Reader Role Binding
+#################################
+{{- if and .Values.rbacEnabled .Values.webserver.allowPodLogReading }}
+{{- if .Values.multiNamespaceMode }}
+kind: ClusterRoleBinding
+{{- else }}
+kind: RoleBinding
+{{- end }}
+apiVersion: rbac.authorization.k8s.io/v1
+metadata:
+{{- if not .Values.multiNamespaceMode }}
+  namespace: {{ .Release.Namespace }}
+{{- end }}
+  name: {{ .Release.Name }}-pod-log-reader-rolebinding
+  labels:
+    tier: airflow
+    release: {{ .Release.Name }}
+    chart: "{{ .Chart.Name }}-{{ .Chart.Version }}"
+    heritage: {{ .Release.Service }}
+{{- with .Values.labels }}
+{{ toYaml . | indent 4 }}
+{{- end }}
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+{{- if .Values.multiNamespaceMode }}
+  kind: ClusterRole
+{{- else }}
+  kind: Role
+{{- end }}
+  name: {{ .Release.Name }}-pod-log-reader-role
+subjects:
+  - kind: ServiceAccount
+    name: {{ .Release.Name }}-webserver
+    namespace: {{ .Release.Namespace }}
+{{- end }}
diff --git a/chart/tests/test_basic_helm_chart.py b/chart/tests/test_basic_helm_chart.py
index 26ea1c1..af66267 100644
--- a/chart/tests/test_basic_helm_chart.py
+++ b/chart/tests/test_basic_helm_chart.py
@@ -22,7 +22,7 @@ import jmespath
 
 from tests.helm_template_generator import render_chart
 
-OBJECT_COUNT_IN_BASIC_DEPLOYMENT = 22
+OBJECT_COUNT_IN_BASIC_DEPLOYMENT = 24
 
 
 class TestBaseChartTest(unittest.TestCase):
@@ -50,7 +50,9 @@ class TestBaseChartTest(unittest.TestCase):
                 ('Secret', 'TEST-BASIC-airflow-result-backend'),
                 ('ConfigMap', 'TEST-BASIC-airflow-config'),
                 ('Role', 'TEST-BASIC-pod-launcher-role'),
+                ('Role', 'TEST-BASIC-pod-log-reader-role'),
                 ('RoleBinding', 'TEST-BASIC-pod-launcher-rolebinding'),
+                ('RoleBinding', 'TEST-BASIC-pod-log-reader-rolebinding'),
                 ('Service', 'TEST-BASIC-postgresql-headless'),
                 ('Service', 'TEST-BASIC-postgresql'),
                 ('Service', 'TEST-BASIC-statsd'),
diff --git a/chart/values.schema.json b/chart/values.schema.json
index f1d8271..3248b66 100644
--- a/chart/values.schema.json
+++ b/chart/values.schema.json
@@ -688,6 +688,10 @@
             "type": "object",
             "additionalProperties": false,
             "properties": {
+                "allowPodLogReading": {
+                  "description": "Allow webserver to read k8s pod logs. Useful when you don't have an external log store.",
+                  "type": "boolean"
+                },
                 "livenessProbe": {
                     "description": "Liveness probe configuration.",
                     "type": "object",
diff --git a/chart/values.yaml b/chart/values.yaml
index d84a785..38f26e5 100644
--- a/chart/values.yaml
+++ b/chart/values.yaml
@@ -387,6 +387,7 @@ scheduler:
 
 # Airflow webserver settings
 webserver:
+  allowPodLogReading: true
   livenessProbe:
     initialDelaySeconds: 15
     timeoutSeconds: 30


[airflow] 22/44: Fixes image building in DockerHub (#13039)

Posted by ka...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 4541025776a51740f3be8c2317e1d2e90e197661
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Dec 13 12:08:55 2020 +0100

    Fixes image building in DockerHub (#13039)
    
    AIRFLOW_CONSTRAINTS_REFERENCE was previously derived from
    INSTALL_AIRFLOW_VERSION before that variable had been computed from
    DOCKER_TAG, so the image could be built against the wrong constraints.
    The exports are reordered so that the constraints reference picks up
    the version extracted from the tag.
    
    (cherry picked from commit 0d49a4742c269fa537d59432cc4635286430a5a4)
---
 scripts/ci/images/ci_build_dockerhub.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh
index e5e230b..00317df 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/images/ci_build_dockerhub.sh
@@ -113,10 +113,10 @@ else
     export DOCKER_CACHE="local"
     # Name the image based on the TAG rather than based on the branch name
     export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}"
-    export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}"
     export AIRFLOW_SOURCES_FROM="empty"
     export AIRFLOW_SOURCES_TO="/empty"
     export INSTALL_AIRFLOW_VERSION="${DOCKER_TAG%-python*}"
+    export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}"
 
     # shellcheck source=scripts/ci/libraries/_script_init.sh
     . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"