Posted to commits@airflow.apache.org by po...@apache.org on 2020/12/16 15:08:50 UTC

[airflow] branch v1-10-test updated (2adb41f -> 10101f08)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a change to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


    from 2adb41f  Add identity pre-commit hook (#13089)
     new 205c86c  Production images on CI are now built from packages (#12685)
     new 10101f08 Skip identity pre-commit in ci and always display it first

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .dockerignore                                      |  3 +-
 .github/workflows/build-images-workflow-run.yml    |  6 +--
 .github/workflows/ci.yml                           |  4 +-
 .github/workflows/scheduled_quarantined.yml        |  1 -
 .pre-commit-config.yaml                            |  8 ++--
 BREEZE.rst                                         |  8 ++++
 CI.rst                                             | 19 ++-------
 CONTRIBUTING.rst                                   |  2 +-
 Dockerfile.ci                                      | 16 ++++----
 LOCAL_VIRTUALENV.rst                               | 25 ++++++++++++
 breeze                                             |  2 +
 breeze-complete                                    |  4 +-
 docker-context-files/README.md                     |  2 +-
 .../ci_build_airflow_package.sh}                   | 12 +++---
 scripts/ci/docker-compose/files.yml                |  1 +
 scripts/ci/docker-compose/local.yml                |  1 -
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  2 +
 scripts/ci/images/ci_verify_prod_image.sh          |  2 +-
 scripts/ci/libraries/_all_libs.sh                  |  2 +
 .../_build_airflow_packages.sh}                    | 41 ++++++++++---------
 scripts/ci/libraries/_initialization.sh            | 22 ++++-------
 scripts/ci/libraries/_local_mounts.sh              |  1 -
 scripts/ci/libraries/_runs.sh                      | 17 ++++++--
 scripts/ci/testing/ci_run_airflow_testing.sh       |  5 +--
 scripts/in_container/_in_container_utils.sh        | 16 --------
 scripts/in_container/check_environment.sh          | 46 ++++++++++++----------
 scripts/in_container/configure_environment.sh      |  1 -
 scripts/in_container/entrypoint_exec.sh            |  1 -
 tests/bats/breeze/test_breeze_complete.bats        |  8 ++++
 29 files changed, 152 insertions(+), 126 deletions(-)
 copy scripts/ci/{static_checks/helm_lint.sh => build_airflow/ci_build_airflow_package.sh} (79%)
 copy scripts/ci/{static_checks/mypy.sh => libraries/_build_airflow_packages.sh} (54%)
 mode change 100755 => 100644


[airflow] 02/02: Skip identity pre-commit in ci and always display it first

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 10101f0894e16d6504386812553f56bef8cc4cca
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Dec 16 13:17:50 2020 +0100

    Skip identity pre-commit in ci and always display it first
    
    (cherry picked from commit 76f2215b95ab8da0d3c4e821138b6dd4275b7c79)
---
 .github/workflows/ci.yml | 4 ++--
 .pre-commit-config.yaml  | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 33fb5d4..a449d2c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -207,7 +207,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [build-info, ci-images]
     env:
-      SKIP: "pylint"
+      SKIP: "pylint,identity"
       MOUNT_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'false'
@@ -243,7 +243,7 @@ jobs:
     runs-on: ubuntu-20.04
     needs: [build-info]
     env:
-      SKIP: "build,mypy,flake8,pylint,bats-in-container-tests"
+      SKIP: "build,mypy,flake8,pylint,bats-in-container-tests,identity"
       MOUNT_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'true'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2e9edda..2e6207d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -21,6 +21,10 @@ default_language_version:
   python: python3
 minimum_pre_commit_version: "1.20.0"
 repos:
+  - repo: meta
+    hooks:
+      - id: identity
+      - id: check-hooks-apply
   - repo: https://github.com/Lucas-C/pre-commit-hooks
     rev: v1.1.9
     hooks:
@@ -146,10 +150,6 @@ repos:
         args:
           - "--maxlevel"
           - "2"
-  - repo: meta
-    hooks:
-      - id: identity
-      - id: check-hooks-apply
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v3.3.0
     hooks:
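
For reference, pre-commit skips any hook ids listed in the SKIP environment variable, which is how the
workflow change above disables the identity hook on CI while keeping it displayed first locally. A minimal
local sketch (not part of the commit itself):

    # Skip the same hooks the CI configuration above skips:
    SKIP="pylint,identity" pre-commit run --all-files

    # Run only the meta "identity" hook to see which files would be passed to the other hooks:
    pre-commit run identity --all-files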


[airflow] 01/02: Production images on CI are now built from packages (#12685)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 205c86c3b5e1741d218eabc661131c3b3b81f4a0
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Dec 6 23:36:33 2020 +0100

    Production images on CI are now built from packages (#12685)
    
    So far, the production images of Airflow were using sources
    when they were built on CI. This PR changes that, to build
    airflow + providers packages first and install them
    rather than use sources as installation mechanism.
    
    Part of #12261
    
    (cherry picked from commit ed1825c0264d1f77c4754b722fb3721cbcd779d7)
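
A rough sketch of the sequence this introduces, using the scripts touched below (the exact wiring lives
in the GitHub workflow files, so treat the order as illustrative only):

    ./scripts/ci/build_airflow/ci_build_airflow_package.sh    # build apache-airflow packages into ./dist
    ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh        # build the PROD image from the prepared packages
    ./scripts/ci/images/ci_verify_prod_image.sh               # verify the image, including a 'pip check'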
---
 .dockerignore                                      |  3 +-
 .github/workflows/build-images-workflow-run.yml    |  6 +--
 .github/workflows/scheduled_quarantined.yml        |  1 -
 BREEZE.rst                                         |  8 ++++
 CI.rst                                             | 19 ++-------
 CONTRIBUTING.rst                                   |  2 +-
 Dockerfile.ci                                      | 16 ++++----
 LOCAL_VIRTUALENV.rst                               | 25 ++++++++++++
 breeze                                             |  2 +
 breeze-complete                                    |  4 +-
 docker-context-files/README.md                     |  2 +-
 .../ci_build_airflow_package.sh}                   | 18 ++++++---
 scripts/ci/docker-compose/files.yml                |  1 +
 scripts/ci/docker-compose/local.yml                |  1 -
 scripts/ci/images/ci_prepare_prod_image_on_ci.sh   |  2 +
 scripts/ci/images/ci_verify_prod_image.sh          |  2 +-
 scripts/ci/libraries/_all_libs.sh                  |  2 +
 .../libraries/_build_airflow_packages.sh}          | 32 ++++++++++-----
 scripts/ci/libraries/_initialization.sh            | 22 ++++-------
 scripts/ci/libraries/_local_mounts.sh              |  1 -
 scripts/ci/libraries/_runs.sh                      | 17 ++++++--
 scripts/ci/testing/ci_run_airflow_testing.sh       |  5 +--
 scripts/in_container/_in_container_utils.sh        | 16 --------
 scripts/in_container/check_environment.sh          | 46 ++++++++++++----------
 scripts/in_container/configure_environment.sh      |  1 -
 scripts/in_container/entrypoint_exec.sh            |  1 -
 tests/bats/breeze/test_breeze_complete.bats        |  8 ++++
 27 files changed, 152 insertions(+), 111 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index ac7372b..8a90d74 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -59,8 +59,9 @@
 !.github
 !empty
 
-# This folder is for you if you want to add any files to the docker context when you build your own
+# This folder is for you if you want to add any packages to the docker context when you build your own
 # docker image. most of other files and any new folder you add will be excluded by default
+# if you need other types of files - please add the extensions here.
 !docker-context-files
 
 # Avoid triggering context change on README change (new companies using Airflow)
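
The comment above keeps docker-context-files in the Docker build context so that locally built packages
can be installed during the image build. A hedged example of how packages typically end up there (paths
assume the repository root as the working directory):

    # Packages built into ./dist become part of the build context once copied here;
    # everything else you add to the repository stays excluded by the rules above.
    cp dist/*.whl dist/*.tar.gz docker-context-files/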
diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index c5480c6..93b2176 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -23,7 +23,6 @@ on:  # yamllint disable-line rule:truthy
     types: ['requested']
 env:
   MOUNT_LOCAL_SOURCES: "false"
-  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
@@ -328,7 +327,7 @@ jobs:
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
-          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
+          python-version: ${{  matrix.python-version }}
         if: steps.defaults.outputs.proceed == 'true'
       - name: >
           Override "scripts/ci" with the "${{ needs.cancel-workflow-runs.outputs.targetBranch }}" branch
@@ -345,7 +344,8 @@ jobs:
         if: steps.defaults.outputs.proceed == 'true'
       - name: "Build CI images ${{ matrix.python-version }}:${{ github.event.workflow_run.id }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-        if: matrix.image-type == 'CI' && steps.defaults.outputs.proceed == 'true'
+        # locally built CI image is needed to prepare packages for PROD image build
+        if: steps.defaults.outputs.proceed == 'true'
       - name: "Push CI images ${{ matrix.python-version }}:${{ github.event.workflow_run.id }}"
         run: ./scripts/ci/images/ci_push_ci_images.sh
         if: matrix.image-type == 'CI' && steps.defaults.outputs.proceed == 'true'
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index 14dc91d..9877e4c 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -24,7 +24,6 @@ on:  # yamllint disable-line rule:truthy
 
 env:
   MOUNT_LOCAL_SOURCES: "false"
-  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
diff --git a/BREEZE.rst b/BREEZE.rst
index 43018a0..4c1b3a7 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1653,6 +1653,14 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           Default: 
 
+  -S, --version-suffix-for-pypi SUFFIX
+          Adds optional suffix to the version in the generated backport package. It can be used
+          to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.
+
+  -N, --version-suffix-for-svn SUFFIX
+          Adds optional suffix to the generated names of package. It can be used to generate
+          rc1/rc2 ... versions of the packages to be uploaded to SVN.
+
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
           debugging - when you run breeze with --verbose flags you will be able to see the commands
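
As a hedged illustration of the new flags documented above (the command name comes from the breeze hunk
later in this diff; the suffix value and output location are only examples):

    # Build rc1-suffixed airflow packages (they land in ./dist):
    ./breeze prepare-airflow-packages --version-suffix-for-pypi rc1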
diff --git a/CI.rst b/CI.rst
index e087438..0ac1c9d 100644
--- a/CI.rst
+++ b/CI.rst
@@ -135,13 +135,6 @@ You can use those variables when you try to reproduce the build locally.
 |                                         |             |             |            | directories) generated locally on the           |
 |                                         |             |             |            | host during development.                        |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
-| ``MOUNT_FILES``                         |     true    |     true    |    true    | Determines whether "files" folder from          |
-|                                         |             |             |            | sources is mounted as "/files" folder           |
-|                                         |             |             |            | inside the container. This is used to           |
-|                                         |             |             |            | share results of local actions to the           |
-|                                         |             |             |            | host, as well as to pass host files to          |
-|                                         |             |             |            | inside container for local development.         |
-+-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 |                                                           Force variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``FORCE_PULL_IMAGES``                   |    true     |    true     |    true    | Determines if images are force-pulled,          |
@@ -203,7 +196,9 @@ You can use those variables when you try to reproduce the build locally.
 |                                                           Image variables                                                          |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``INSTALL_AIRFLOW_VERSION``             |             |             |            | Installs Airflow version from PyPI when         |
-|                                         |             |             |            | building image.                                 |
+|                                         |             |             |            | building image. Can be "none" to skip airflow   |
+|                                         |             |             |            | installation so that it can be installed from   |
+|                                         |             |             |            | locally prepared packages.                      |
 +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+
 | ``INSTALL_AIRFLOW_REFERENCE``           |             |             |            | Installs Airflow version from GitHub            |
 |                                         |             |             |            | branch or tag.                                  |
@@ -362,12 +357,6 @@ Note that you need to set "CI" variable to true in order to get the same results
 |                              |                      | [``pull_request``, ``pull_request_target``,         |
 |                              |                      |  ``schedule``, ``push``]                            |
 +------------------------------+----------------------+-----------------------------------------------------+
-| CI_SOURCE_REPO               | ``apache/airflow``   | Source repository. This might be different than the |
-|                              |                      | ``CI_TARGET_REPO`` for pull requests                |
-+------------------------------+----------------------+-----------------------------------------------------+
-| CI_SOURCE_BRANCH             | ``master``           | Branch in the source repository that is used to     |
-|                              |                      | make the pull request.                              |
-+------------------------------+----------------------+-----------------------------------------------------+
 | CI_REF                       | ``refs/head/master`` | Branch in the source repository that is used to     |
 |                              |                      | make the pull request.                              |
 +------------------------------+----------------------+-----------------------------------------------------+
@@ -700,7 +689,7 @@ We also have a script that can help to clean-up the old artifacts:
 CodeQL scan
 -----------
 
-The CodeQL security scan uses GitHub security scan framework to scan our code for security violations.
+The `CodeQL <https://securitylab.github.com/tools/codeql>`_ security scan uses the GitHub security scanning framework to scan our code for security violations.
 It is run for JavaScript and python code.
 
 Naming conventions for stored images
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index bc5661c..bc64117 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -579,7 +579,7 @@ and not installed together with the core, unless you set ``INSTALL_PROVIDERS_FRO
 variable to ``true``.
 
 In Breeze - which is a development environment, ``INSTALL_PROVIDERS_FROM_SOURCES`` variable is set to true,
-but you can add ``--skip-installing-airflow-providers`` flag to Breeze to skip installing providers when
+but you can add ``--skip-installing-airflow-providers-from-sources`` flag to Breeze to skip installing providers when
 building the images.
 
 One watch-out - providers are still always installed (or rather available) if you install airflow from
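
A hedged example of the renamed flag in use (the build-image command is assumed here; only the flag name
comes from the hunk above):

    # Skip installing providers from the local sources when building the image:
    ./breeze build-image --skip-installing-airflow-providers-from-sources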
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 2210989..67e5bb1 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -261,11 +261,11 @@ ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH}
 ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES}
 
-ARG AIRFLOW_LOCAL_PIP_WHEELS=""
-ENV AIRFLOW_LOCAL_PIP_WHEELS=${AIRFLOW_LOCAL_PIP_WHEELS}
+ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
+ENV INSTALL_FROM_DOCKER_CONTEXT_FILES=${INSTALL_FROM_DOCKER_CONTEXT_FILES}
 
-ARG INSTALL_AIRFLOW_VIA_PIP="true"
-ENV INSTALL_AIRFLOW_VIA_PIP=${INSTALL_AIRFLOW_VIA_PIP}
+ARG INSTALL_FROM_PYPI="true"
+ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI}
 
 RUN pip install --upgrade "pip==${PIP_VERSION}"
 
@@ -317,7 +317,7 @@ ENV UPGRADE_TO_LATEST_CONSTRAINTS=${UPGRADE_TO_LATEST_CONSTRAINTS}
 # Usually we will install versions based on the dependencies in setup.py and upgraded only if needed.
 # But in cron job we will install latest versions matching setup.py to see if there is no breaking change
 # and push the constraints if everything is successful
-RUN if [[ ${INSTALL_AIRFLOW_VIA_PIP} == "true" ]]; then \
+RUN if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \
         if [[ "${UPGRADE_TO_LATEST_CONSTRAINTS}" != "false" ]]; then \
             pip install -e ".[${AIRFLOW_EXTRAS}]" --upgrade --upgrade-strategy eager; \
         else \
@@ -329,9 +329,9 @@ RUN if [[ ${INSTALL_AIRFLOW_VIA_PIP} == "true" ]]; then \
 # they are also installed additionally to whatever is installed from Airflow.
 COPY docker-context-files/ /docker-context-files/
 
-RUN if [[ ${AIRFLOW_LOCAL_PIP_WHEELS} != "true" ]]; then \
-        if ls /docker-context-files/*.whl 1> /dev/null 2>&1; then \
-            pip install --no-deps /docker-context-files/*.whl; \
+RUN if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} != "true" ]]; then \
+        if ls /docker-context-files/*.{whl,tar.gz} 1> /dev/null 2>&1; then \
+            pip install --no-deps /docker-context-files/*.{whl,tar.gz}; \
         fi ; \
     fi
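
The renamed build args above can be passed straight to docker build; a minimal sketch keeping the defaults
from the ARG lines (INSTALL_FROM_PYPI defaults to "true", INSTALL_FROM_DOCKER_CONTEXT_FILES to an empty
value):

    # Build the CI image with the default installation path shown in the RUN block above
    # (editable install of the checked-out sources); only the argument names changed.
    docker build . -f Dockerfile.ci \
        --build-arg INSTALL_FROM_PYPI="true" \
        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES=""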
 
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index 574366d..03b60c8 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -171,6 +171,31 @@ Activate your virtualenv, e.g. by using ``workon``, and once you are in it, run:
     cd airflow/www
     yarn build
 
+Developing Providers
+--------------------
+
+In Airflow 2.0 we introduced a split of Apache Airflow into separate packages - there is one main
+apache-airflow package with the core of Airflow and 70+ packages for all providers (external services
+and software Airflow can communicate with).
+
+Developing providers is part of Airflow development, but when you install airflow as editable in your local
+development environment, the corresponding provider packages are also installed from PyPI. However, the
+providers are also present in your "airflow/providers" folder. This might lead to confusion about
+which sources of the providers are imported during development - in general this depends on your
+environment's PYTHONPATH setting.
+
+In order to avoid the confusion, you can set the ``INSTALL_PROVIDERS_FROM_SOURCES`` environment variable
+to ``true`` before running the ``pip install`` command:
+
+.. code-block:: bash
+
+  INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -U -e ".[devel,<OTHER EXTRAS>]" \
+     --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
+
+This way no provider packages will be installed and they will always be imported from the "airflow/providers"
+folder.
+
+
 Running Tests
 -------------
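
A quick, hedged way to confirm which copy of the providers the section above ends up importing (this check
is not part of the documented procedure):

    # With INSTALL_PROVIDERS_FROM_SOURCES="true" the printed path should point at
    # <your checkout>/airflow/providers rather than site-packages.
    python -c "import airflow.providers; print(airflow.providers.__path__)"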
 
diff --git a/breeze b/breeze
index f6a45a5..7a26b30 100755
--- a/breeze
+++ b/breeze
@@ -1676,6 +1676,7 @@ ${CMDNAME} prepare-airflow-packages [FLAGS]
 
 Flags:
 $(breeze::flag_packages)
+$(breeze::flag_version_suffix)
 $(breeze::flag_verbosity)
 "
     readonly DETAILED_USAGE_PREPARE_AIRFLOW_PACKAGES
@@ -2140,6 +2141,7 @@ function breeze::flag_local_file_mounting() {
 -l, --skip-mounting-local-sources
         Skips mounting local volume with sources - you get exactly what is in the
         docker image rather than your current local sources of Airflow.
+
 "
 }
 
diff --git a/breeze-complete b/breeze-complete
index 7e1ccc6..042a7fa 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -140,12 +140,12 @@ build-cache-local build-cache-pulled build-cache-disabled disable-pip-cache
 dockerhub-user: dockerhub-repo: github-registry github-repository: github-image-id:
 postgres-version: mysql-version:
 additional-extras: additional-python-deps: additional-dev-deps: additional-runtime-deps: image-tag:
-disable-mysql-client-installation constraints-location: disable-pip-cache add-local-pip-wheels
+disable-mysql-client-installation constraints-location: disable-pip-cache install-from-docker-context-files
 additional-extras: additional-python-deps: skip-installing-airflow-via-pip
 dev-apt-deps: additional-dev-apt-deps: dev-apt-command: additional-dev-apt-command: additional-dev-apt-env:
 runtime-apt-deps: additional-runtime-apt-deps: runtime-apt-command: additional-runtime-apt-command: additional-runtime-apt-env:
 load-default-connections load-example-dags
-install-wheels no-rbac-ui
+install-packages-from-dist no-rbac-ui package-format:
 test-type:
 preserve-volumes
 "
diff --git a/docker-context-files/README.md b/docker-context-files/README.md
index 52213cb..07a6c22 100644
--- a/docker-context-files/README.md
+++ b/docker-context-files/README.md
@@ -26,7 +26,7 @@ th [docker-context-files](.) folder to the image context - in case of production
 the build segment, co content of the folder is available in the `/docker-context-file` folder inside
 the build image. You can store constraint files and wheel
 packages there that you want to install as PYPI packages and refer to those packages using
-`--constraint-location` flag for constraints or by using `--add-local-pip-wheels` flag.
+`--constraint-location` flag for constraints or by using `--install-from-local-files-when-building` flag.
 
 By default, the content of this folder is .gitignored so that any binaries and files you put here are only
 used for local builds and not committed to the repository.
diff --git a/scripts/ci/docker-compose/files.yml b/scripts/ci/build_airflow/ci_build_airflow_package.sh
old mode 100644
new mode 100755
similarity index 67%
copy from scripts/ci/docker-compose/files.yml
copy to scripts/ci/build_airflow/ci_build_airflow_package.sh
index 5625ca6..593d94a
--- a/scripts/ci/docker-compose/files.yml
+++ b/scripts/ci/build_airflow/ci_build_airflow_package.sh
@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -14,9 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
----
-version: "2.2"
-services:
-  airflow:
-    volumes:
-      - ../../../files:/files:cached
+# shellcheck source=scripts/ci/libraries/_script_init.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+
+build_airflow_packages::build_airflow_packages
+
+cd "${AIRFLOW_SOURCES}/dist" || exit 1
+
+dump_file="/tmp/airflow_$(date +"%Y%m%d-%H%M%S").tar.gz"
+tar -cvzf "${dump_file}" .
+
+echo "Airflow is in dist and also tar-gzipped in ${dump_file}"
diff --git a/scripts/ci/docker-compose/files.yml b/scripts/ci/docker-compose/files.yml
index 5625ca6..2925bd9 100644
--- a/scripts/ci/docker-compose/files.yml
+++ b/scripts/ci/docker-compose/files.yml
@@ -20,3 +20,4 @@ services:
   airflow:
     volumes:
       - ../../../files:/files:cached
+      - ../../../dist:/dist:cached
diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml
index 20ce4d3..1fb6200 100644
--- a/scripts/ci/docker-compose/local.yml
+++ b/scripts/ci/docker-compose/local.yml
@@ -43,7 +43,6 @@ services:
       - ../../../dags:/opt/airflow/dags:cached
       - ../../../dev:/opt/airflow/dev:cached
       - ../../../docs:/opt/airflow/docs:cached
-      - ../../../dist:/dist:cached
       - ../../../hooks:/opt/airflow/hooks:cached
       - ../../../logs:/root/airflow/logs:cached
       - ../../../pytest.ini:/opt/airflow/pytest.ini:cached
diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
index 700487c..43defdf 100755
--- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
+++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -23,6 +23,8 @@ export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
 export DOCKER_CACHE="local"
 export VERBOSE="true"
 
+export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
+readonly INSTALLED_EXTRAS
 
 # shellcheck source=scripts/ci/libraries/_script_init.sh
 . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh
index 30f9def..ec529b4 100755
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/images/ci_verify_prod_image.sh
@@ -62,7 +62,7 @@ function verify_prod_image_dependencies {
     docker run --rm --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" -c 'pip check'
     local res=$?
     if [[ ${res} != "0" ]]; then
-        echo  "${COLOR_RED_ERROR} ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.  ${COLOR_RESET}"
+        echo "${COLOR_RED_ERROR} ^^^ Some dependencies are conflicting. See instructions below on how to deal with it.  ${COLOR_RESET}"
         echo
         build_images::inform_about_pip_check "--production "
         # TODO(potiuk) - enable the comment once https://github.com/apache/airflow/pull/12188 is merged
diff --git a/scripts/ci/libraries/_all_libs.sh b/scripts/ci/libraries/_all_libs.sh
index 43e2e23..d676e77 100755
--- a/scripts/ci/libraries/_all_libs.sh
+++ b/scripts/ci/libraries/_all_libs.sh
@@ -32,6 +32,8 @@ readonly SCRIPTS_CI_DIR
 . "${LIBRARIES_DIR}"/_repeats.sh
 # shellcheck source=scripts/ci/libraries/_sanity_checks.sh
 . "${LIBRARIES_DIR}"/_sanity_checks.sh
+# shellcheck source=scripts/ci/libraries/_build_airflow_packages.sh
+. "${LIBRARIES_DIR}"/_build_airflow_packages.sh
 # shellcheck source=scripts/ci/libraries/_build_images.sh
 . "${LIBRARIES_DIR}"/_build_images.sh
 # shellcheck source=scripts/ci/libraries/_kind.sh
diff --git a/scripts/in_container/entrypoint_exec.sh b/scripts/ci/libraries/_build_airflow_packages.sh
old mode 100755
new mode 100644
similarity index 54%
copy from scripts/in_container/entrypoint_exec.sh
copy to scripts/ci/libraries/_build_airflow_packages.sh
index 728872b..3ec1bca
--- a/scripts/in_container/entrypoint_exec.sh
+++ b/scripts/ci/libraries/_build_airflow_packages.sh
@@ -16,16 +16,30 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
-. /opt/airflow/scripts/in_container/_in_container_script_init.sh
+# Build airflow packages
+function build_airflow_packages::build_airflow_packages() {
+    rm -rf -- *egg-info*
+    rm -rf -- build
 
-# shellcheck source=scripts/in_container/configure_environment.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"
+    pip install --upgrade "pip==${PIP_VERSION}" "wheel==${WHEEL_VERSION}"
 
-# shellcheck source=scripts/in_container/run_init_script.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/run_init_script.sh"
+    local packages=()
 
-# shellcheck source=scripts/in_container/run_tmux.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/run_tmux.sh"
+    if [[ ${PACKAGE_FORMAT} == "wheel" || ${PACKAGE_FORMAT} == "both" ]] ; then
+        packages+=("bdist_wheel")
+    fi
+    if [[ ${PACKAGE_FORMAT} == "sdist" || ${PACKAGE_FORMAT} == "both" ]] ; then
+        packages+=("sdist")
+    fi
 
-exec /bin/bash "${@}"
+    # Prepare airflow's wheel
+    python setup.py compile_assets "${packages[@]}"
+
+    # clean-up
+    rm -rf -- *egg-info*
+    rm -rf -- build
+
+    echo
+    echo "Airflow package prepared: ${PACKAGE_FORMAT}"
+    echo
+}
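
The function above is driven by PACKAGE_FORMAT (wheel, sdist or both; the bats test added at the end of
this commit asserts that the completion default is wheel). A hedged usage sketch, assuming it runs inside
the container from the airflow source root with the library file already sourced:

    # Build both a wheel and an sdist into ./dist:
    PACKAGE_FORMAT="both" build_airflow_packages::build_airflow_packages
    ls dist/    # expect apache-airflow-*.whl and apache-airflow-*.tar.gz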
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 50cdbf0..40437dd 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -126,9 +126,9 @@ function initialization::initialize_base_variables() {
     # If set to true, RBAC UI will not be used for 1.10 version
     export DISABLE_RBAC=${DISABLE_RBAC:="false"}
 
-    # if set to true, the ci image will look for wheel packages in dist folder and will install them
+    # if set to true, the ci image will look for packages in dist folder and will install them
     # during entering the container
-    export INSTALL_WHEELS=${INSTALL_WHEELS:="false"}
+    export INSTALL_PACKAGES_FROM_DIST=${INSTALL_PACKAGES_FROM_DIST:="false"}
 
     # If set the specified file will be used to initialize Airflow after the environment is created,
     # otherwise it will use files/airflow-breeze-config/init.sh
@@ -235,9 +235,6 @@ function initialization::initialize_mount_variables() {
     # Whether necessary for airflow run local sources are mounted to docker
     export MOUNT_LOCAL_SOURCES=${MOUNT_LOCAL_SOURCES:="true"}
 
-    # Whether files folder from local sources are mounted to docker
-    export MOUNT_FILES=${MOUNT_FILES:="true"}
-
     if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then
         verbosity::print_info
         verbosity::print_info "Mounting necessary host volumes to Docker"
@@ -249,14 +246,9 @@ function initialization::initialize_mount_variables() {
         verbosity::print_info
     fi
 
-    if [[ ${MOUNT_FILES} == "true" ]]; then
-        verbosity::print_info
-        verbosity::print_info "Mounting files folder to Docker"
-        verbosity::print_info
-        EXTRA_DOCKER_FLAGS+=("-v" "${AIRFLOW_SOURCES}/files:/files")
-    fi
-
     EXTRA_DOCKER_FLAGS+=(
+        "-v" "${AIRFLOW_SOURCES}/files:/files"
+        "-v" "${AIRFLOW_SOURCES}/dist:/dist"
         "--rm"
         "--env-file" "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env"
     )
@@ -383,10 +375,10 @@ function initialization::initialize_image_build_variables() {
 
     # whether installation of Airflow should be done via PIP. You can set it to false if you have
     # all the binary packages (including airflow) in the docker-context-files folder and use
-    # AIRFLOW_LOCAL_PIP_WHEELS="true" to install it from there.
-    export INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP:="true"}"
+    # INSTALL_FROM_DOCKER_CONTEXT_FILES="true" to install it from there.
+    export INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI:="true"}"
     # whether installation should be performed from the local wheel packages in "docker-context-files" folder
-    export AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS:="false"}"
+    export INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES:="false"}"
     # reference to CONSTRAINTS. they can be overwritten manually or replaced with AIRFLOW_CONSTRAINTS_LOCATION
     export AIRFLOW_CONSTRAINTS_REFERENCE="${AIRFLOW_CONSTRAINTS_REFERENCE:=""}"
     # direct constraints Location - can be URL or path to local file. If empty, it will be calculated
diff --git a/scripts/ci/libraries/_local_mounts.sh b/scripts/ci/libraries/_local_mounts.sh
index 39790c9..0ccd250 100644
--- a/scripts/ci/libraries/_local_mounts.sh
+++ b/scripts/ci/libraries/_local_mounts.sh
@@ -39,7 +39,6 @@ function local_mounts::generate_local_mounts_list {
         "$prefix"dags:/opt/airflow/dags:cached
         "$prefix"dev:/opt/airflow/dev:cached
         "$prefix"docs:/opt/airflow/docs:cached
-        "$prefix"dist:/dist:cached
         "$prefix"hooks:/opt/airflow/hooks:cached
         "$prefix"logs:/root/airflow/logs:cached
         "$prefix"pytest.ini:/opt/airflow/pytest.ini:cached
diff --git a/scripts/ci/libraries/_runs.sh b/scripts/ci/libraries/_runs.sh
index 7b4e51e..17ab1ee 100644
--- a/scripts/ci/libraries/_runs.sh
+++ b/scripts/ci/libraries/_runs.sh
@@ -19,9 +19,10 @@
 # Docker command to build documentation
 function runs::run_docs() {
     docker run "${EXTRA_DOCKER_FLAGS[@]}" -t \
-            --entrypoint "/usr/local/bin/dumb-init"  \
-            "${AIRFLOW_CI_IMAGE}" \
-            "--" "/opt/airflow/scripts/in_container/run_docs_build.sh" "${@}"
+        -e "GITHUB_ACTIONS=${GITHUB_ACTIONS="false"}" \
+        --entrypoint "/usr/local/bin/dumb-init"  \
+        "${AIRFLOW_CI_IMAGE}" \
+        "--" "/opt/airflow/scripts/in_container/run_docs_build.sh" "${@}"
 }
 
 
@@ -32,3 +33,13 @@ function runs::run_generate_constraints() {
         "${AIRFLOW_CI_IMAGE}" \
         "--" "/opt/airflow/scripts/in_container/run_generate_constraints.sh"
 }
+
+# Docker command to prepare provider packages
+function runs::run_prepare_airflow_packages() {
+    docker run "${EXTRA_DOCKER_FLAGS[@]}" \
+        --entrypoint "/usr/local/bin/dumb-init"  \
+        -t \
+        -v "${AIRFLOW_SOURCES}:/opt/airflow" \
+        "${AIRFLOW_CI_IMAGE}" \
+        "--" "/opt/airflow/scripts/in_container/run_prepare_airflow_packages.sh" "${@}"
+}
diff --git a/scripts/ci/testing/ci_run_airflow_testing.sh b/scripts/ci/testing/ci_run_airflow_testing.sh
index 39f1501..8bc6169 100755
--- a/scripts/ci/testing/ci_run_airflow_testing.sh
+++ b/scripts/ci/testing/ci_run_airflow_testing.sh
@@ -103,15 +103,12 @@ build_images::rebuild_ci_image_if_needed
 
 initialization::set_mysql_encoding
 
-DOCKER_COMPOSE_LOCAL=()
+DOCKER_COMPOSE_LOCAL=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml")
 
 if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then
     DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml")
 fi
 
-if [[ ${MOUNT_FILES} == "true" ]]; then
-    DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml")
-fi
 
 if [[ ${GITHUB_ACTIONS} == "true" ]]; then
     DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ga.yml")
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 1dbcdd4..f1a55c8 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -260,22 +260,6 @@ function uninstall_airflow() {
     find /root/airflow/ -type f -print0 | xargs -0 rm -f --
 }
 
-function uninstall_providers() {
-    echo
-    echo "Uninstalling all provider packages"
-    echo
-    local provider_packages_to_uninstall
-    provider_packages_to_uninstall=$(pip freeze | grep apache-airflow-providers || true)
-    if [[ -n ${provider_packages_to_uninstall} ]]; then
-        echo "${provider_packages_to_uninstall}" | xargs pip uninstall -y || true 2>/dev/null
-    fi
-}
-
-function uninstall_airflow_and_providers() {
-    uninstall_providers
-    uninstall_airflow
-}
-
 function install_released_airflow_version() {
     local version="${1}"
     local extras="${2}"
diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh
index 9ab400a..258036f 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -20,13 +20,14 @@ EXIT_CODE=0
 
 DISABLED_INTEGRATIONS=""
 
-function check_service() {
-    INTEGRATION_NAME=$1
+function check_service {
+    LABEL=$1
     CALL=$2
     MAX_CHECK=${3:=1}
 
-    echo -n "${INTEGRATION_NAME}: "
-    while true; do
+    echo -n "${LABEL}: "
+    while true
+    do
         set +e
         LAST_CHECK_RESULT=$(eval "${CALL}" 2>&1)
         RES=$?
@@ -36,7 +37,7 @@ function check_service() {
             break
         else
             echo -n "."
-            MAX_CHECK=$((MAX_CHECK - 1))
+            MAX_CHECK=$((MAX_CHECK-1))
         fi
         if [[ ${MAX_CHECK} == 0 ]]; then
             echo "${COLOR_RED_ERROR} Maximum number of retries while checking service. Exiting ${COLOR_RESET}"
@@ -55,8 +56,11 @@ function check_service() {
     fi
 }
 
-function check_integration() {
-    INTEGRATION_NAME=$1
+function check_integration {
+    INTEGRATION_LABEL=$1
+    INTEGRATION_NAME=$2
+    CALL=$3
+    MAX_CHECK=${4:=1}
 
     ENV_VAR_NAME=INTEGRATION_${INTEGRATION_NAME^^}
     if [[ ${!ENV_VAR_NAME:=} != "true" ]]; then
@@ -65,16 +69,16 @@ function check_integration() {
         fi
         return
     fi
-    check_service "${@}"
+    check_service "${INTEGRATION_LABEL}" "${CALL}" "${MAX_CHECK}"
 }
 
-function check_db_backend() {
+function check_db_backend {
     MAX_CHECK=${1:=1}
 
     if [[ ${BACKEND} == "postgres" ]]; then
-        check_service "postgres" "nc -zvv postgres 5432" "${MAX_CHECK}"
+        check_service "PostgreSQL" "nc -zvv postgres 5432" "${MAX_CHECK}"
     elif [[ ${BACKEND} == "mysql" ]]; then
-        check_service "mysql" "nc -zvv mysql 3306" "${MAX_CHECK}"
+        check_service "MySQL" "nc -zvv mysql 3306" "${MAX_CHECK}"
     elif [[ ${BACKEND} == "sqlite" ]]; then
         return
     else
@@ -119,7 +123,8 @@ function startairflow_if_requested() {
             airflow initdb
             airflow create_user -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email || true
         else
-            airflow create_user -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
+            airflow db init
+            airflow users create -u admin -p admin -f Thor -l Adminstra -r Admin -e dummy@dummy.email
         fi
 
         # shellcheck source=scripts/in_container/run_init_script.sh
@@ -136,13 +141,14 @@ if [[ -n ${BACKEND=} ]]; then
     check_db_backend 20
     echo "-----------------------------------------------------------------------------------------------"
 fi
-check_integration kerberos "nc -zvv kerberos 88" 30
-check_integration mongo "nc -zvv mongo 27017" 20
-check_integration redis "nc -zvv redis 6379" 20
-check_integration rabbitmq "nc -zvv rabbitmq 5672" 20
-check_integration cassandra "nc -zvv cassandra 9042" 20
-check_integration openldap "nc -zvv openldap 389" 20
-check_integration presto "nc -zvv presto 8080" 40
+check_integration "Kerberos" "kerberos" "nc -zvv kerberos 88" 30
+check_integration "MongoDB" "mongo" "nc -zvv mongo 27017" 20
+check_integration "Redis" "redis" "nc -zvv redis 6379" 20
+check_integration "RabbitMQ" "rabbitmq" "nc -zvv rabbitmq 5672" 20
+check_integration "Cassandra" "cassandra" "nc -zvv cassandra 9042" 20
+check_integration "OpenLDAP" "openldap" "nc -zvv openldap 389" 20
+check_integration "Presto (API)" "presto" \
+    "curl --max-time 1 http://presto:8080/v1/info/ | grep '\"starting\":false'" 20
 echo "-----------------------------------------------------------------------------------------------"
 
 if [[ ${EXIT_CODE} != 0 ]]; then
@@ -165,5 +171,3 @@ if [[ -n ${DISABLED_INTEGRATIONS=} ]]; then
     echo "Enable them via --integration <INTEGRATION_NAME> flags (you can use 'all' for all)"
     echo
 fi
-
-exit 0
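
With the new signature the label shown to the user, the INTEGRATION_<NAME> variable suffix, the probe
command and the retry count are passed separately. A hypothetical additional check in the same style
(Trino is only an example, it is not part of this change):

    check_integration "Trino (API)" "trino" "nc -zvv trino 8080" 40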
diff --git a/scripts/in_container/configure_environment.sh b/scripts/in_container/configure_environment.sh
index 3fe7858..01c3487 100644
--- a/scripts/in_container/configure_environment.sh
+++ b/scripts/in_container/configure_environment.sh
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 # Script to check licences for all code. Can be started from any working directory
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
 export FILES_DIR="/files"
 export AIRFLOW_BREEZE_CONFIG_DIR="${FILES_DIR}/airflow-breeze-config"
 VARIABLES_ENV_FILE="variables.env"
diff --git a/scripts/in_container/entrypoint_exec.sh b/scripts/in_container/entrypoint_exec.sh
index 728872b..4423810 100755
--- a/scripts/in_container/entrypoint_exec.sh
+++ b/scripts/in_container/entrypoint_exec.sh
@@ -15,7 +15,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 # shellcheck source=scripts/in_container/_in_container_script_init.sh
 . /opt/airflow/scripts/in_container/_in_container_script_init.sh
 
diff --git a/tests/bats/breeze/test_breeze_complete.bats b/tests/bats/breeze/test_breeze_complete.bats
index 2e9d1da..3ca32ce 100644
--- a/tests/bats/breeze/test_breeze_complete.bats
+++ b/tests/bats/breeze/test_breeze_complete.bats
@@ -272,3 +272,11 @@
 
   assert_equal "" "${TEST_TYPE}"
 }
+
+@test "Test default package format is wheel" {
+  load ../bats_utils
+  #shellcheck source=breeze-complete
+  source "${AIRFLOW_SOURCES}/breeze-complete"
+
+  assert_equal "wheel" "${PACKAGE_FORMAT}"
+}
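
The new test above can be run on its own with bats, assuming bats-core and the repository's bats helpers
are installed locally:

    bats tests/bats/breeze/test_breeze_complete.bats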