Posted to commits@airflow.apache.org by ka...@apache.org on 2021/01/21 20:19:46 UTC

[airflow] 21/44: Install airflow and providers from dist and verify them (#13033)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 0c8fe86bf99be74d700088cee3fe3044bb12e03c
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sat Dec 12 19:38:30 2020 +0100

    Install airflow and providers from dist and verify them (#13033)
    
    * Install airflow and providers from dist and verify them
    
    This check is there to prevent problems similar to those reported
    in #13027 and fixed in #13031.
    
    Previously we always built airflow from wheel packages; only providers
    were installed from sdist packages and tested. In this version both
    airflow and providers are installed using the same package format
    (sdist or wheel).
    
    * Update scripts/in_container/entrypoint_ci.sh
    
    Co-authored-by: Kaxil Naik <ka...@gmail.com>
    
    Co-authored-by: Kaxil Naik <ka...@gmail.com>
    (cherry picked from commit abf2a4264b18e750dbc2eb384a86d08d821dfba4)
---
 .github/workflows/ci.yml                    |  8 ++-
 BREEZE.rst                                  |  4 +-
 Dockerfile                                  |  2 +-
 breeze-complete                             |  1 +
 docs/production-deployment.rst              |  1 +
 scripts/ci/images/ci_verify_ci_image.sh     |  2 +
 scripts/ci/images/ci_verify_prod_image.sh   |  6 +--
 scripts/ci/libraries/_build_images.sh       | 71 +++++++++++++++---------
 scripts/in_container/_in_container_utils.sh | 19 ++++++-
 scripts/in_container/entrypoint_ci.sh       | 83 +++++++++++++++++++++--------
 10 files changed, 139 insertions(+), 58 deletions(-)
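
The description mentions installing both airflow and providers from "dist" in
either format. As a rough, illustrative sketch of how the two artifact kinds
are produced and then installed from a local dist folder (version numbers and
file names below are examples, not taken from this commit):

    # build both an sdist (.tar.gz) and a wheel (.whl) into ./dist
    python setup.py sdist bdist_wheel

    # install from local artifacts instead of PyPI; pip accepts extras
    # appended to a local archive path, which the scripts below rely on
    pip install "dist/apache-airflow-1.10.14.tar.gz[all]"                # sdist
    pip install "dist/apache_airflow-1.10.14-py2.py3-none-any.whl[all]"  # wheel

Note the naming difference: sdists keep the hyphenated project name
(apache-airflow-...), while wheels normalize it to underscores
(apache_airflow-...). That is why the find patterns added below differ
between the wheel and sdist install helpers.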

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 74670d3..bb06e0d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -28,7 +28,6 @@ on:  # yamllint disable-line rule:truthy
 env:
 
   MOUNT_LOCAL_SOURCES: "false"
-  MOUNT_FILES: "true"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
   FORCE_PULL_IMAGES: "true"
   CHECK_IMAGE_FOR_REBUILD: "true"
@@ -42,6 +41,7 @@ env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   GITHUB_REGISTRY_PULL_IMAGE_TAG: "${{ github.run_id }}"
   GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
+  INSTALL_PROVIDERS_FROM_SOURCES: "true"
 
   # You can switch between building the image in "Build Images" workflow or building them in CI workflow
   # Separately for each job.
@@ -195,6 +195,8 @@ jobs:
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
         if: needs.build-info.outputs.waitForImage == 'true'
+      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
+        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Verify CI image Py${{matrix.python-version}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_verify_ci_image.sh
         if: needs.build-info.outputs.waitForImage == 'true'
@@ -451,7 +453,7 @@ jobs:
           retention-days: 7
 
   tests-sqlite:
-    timeout-minutes: 60
+    timeout-minutes: 80
     name: >
       Sqlite Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
     runs-on: ubuntu-20.04
@@ -649,6 +651,8 @@ jobs:
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
         if: needs.build-info.outputs.waitForImage == 'true'
+      - name: "Prepare PROD Image"
+        run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
       - name: "Verify PROD image Py${{matrix.python-version}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_verify_prod_image.sh
         if: needs.build-info.outputs.waitForImage == 'true'
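
The two new "Prepare ... image" steps make the verify jobs self-sufficient:
verification no longer assumes the image was already pulled by an earlier
step. Outside the workflow, the equivalent sequence would roughly be the
following (both scripts are referenced above; the CI environment variables
they expect are assumed to be set):

    ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
    ./scripts/ci/images/ci_verify_ci_image.sh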
diff --git a/BREEZE.rst b/BREEZE.rst
index 633fb4d..03290c3 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1254,7 +1254,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
          If specified, installs Airflow directly from the PIP released version. This happens at
          image build time for the production image and at container entry time for the CI image. One of:
 
-                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
@@ -2209,7 +2209,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
          If specified, installs Airflow directly from the PIP released version. This happens at
          image build time for the production image and at container entry time for the CI image. One of:
 
-                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel
+                 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist
 
           When 'none' is used, you can install airflow from local packages. When building image,
           airflow package should be added to 'docker-context-files' and
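
With the new allowed value, a Breeze session can be told to install Airflow
from a local sdist instead of a wheel. A hypothetical invocation, assuming
the flag spelling --install-airflow-version that the completion list in
breeze-complete below feeds (the sdist package must already be present
locally):

    ./breeze --install-airflow-version sdist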
diff --git a/Dockerfile b/Dockerfile
index eecc683..a34b63e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -255,7 +255,7 @@ RUN if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then \
         pip install --user ${ADDITIONAL_PYTHON_DEPS} --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"; \
     fi; \
     if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \
-        if ls /docker-context-files/*.whl 1> /dev/null 2>&1; then \
+        if ls /docker-context-files/*.{whl,tar.gz} 1> /dev/null 2>&1; then \
             pip install --user --no-deps /docker-context-files/*.{whl,tar.gz}; \
         fi ; \
     fi; \
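
One subtlety of this ls guard is worth noting. The Dockerfile uses bash (the
[[ ]] tests above require it), and bash expands *.{whl,tar.gz} into two
separate glob words before ls runs; with default globbing, an unmatched
pattern is passed to ls as a literal string, which makes ls exit non-zero
even when the other pattern matched. A minimal sketch outside Docker (paths
illustrative):

    mkdir -p /tmp/ctx && touch /tmp/ctx/pkg.whl
    # only pkg.whl exists, so the *.tar.gz pattern stays literal and ls fails
    if ls /tmp/ctx/*.{whl,tar.gz} 1> /dev/null 2>&1; then
        echo "both patterns matched something"
    else
        echo "at least one pattern had no match"   # this branch is taken
    fi

So unless nullglob is enabled in the build shell, the install step runs only
when the folder satisfies both patterns.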
diff --git a/breeze-complete b/breeze-complete
index 819938b..cdf8fe9 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -58,6 +58,7 @@ _breeze_allowed_install_airflow_versions=$(cat <<-EOF
 1.10.9
 none
 wheel
+sdist
 EOF
 )
 
diff --git a/docs/production-deployment.rst b/docs/production-deployment.rst
index b0ffa02..22059ec 100644
--- a/docs/production-deployment.rst
+++ b/docs/production-deployment.rst
@@ -525,6 +525,7 @@ production image. There are three types of build:
 |                                   |                        | GitHub repository tag or branch or "." to install from sources.                   |
 |                                   |                        | Note that installing from local sources requires appropriate values of the        |
 |                                   |                        | ``AIRFLOW_SOURCES_FROM`` and ``AIRFLOW_SOURCES_TO`` variables as described below. |
+|                                   |                        | Only used when ``INSTALL_FROM_PYPI`` is set to ``true``.                          |
 +-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
 | ``AIRFLOW_INSTALL_VERSION``       |                        | Optional - might be used for package installation of a different Airflow version  |
 |                                   |                        | for example "==1.10.14". For consistency, you should also set ``AIRFLOW_VERSION`` |
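
As a worked example of the build arguments described in this table, a
production image pinned to a released version might be built like this (the
image tag is illustrative):

    docker build . \
        --build-arg INSTALL_FROM_PYPI="true" \
        --build-arg AIRFLOW_VERSION="1.10.14" \
        --build-arg AIRFLOW_INSTALL_VERSION="==1.10.14" \
        --tag my-airflow:1.10.14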
diff --git a/scripts/ci/images/ci_verify_ci_image.sh b/scripts/ci/images/ci_verify_ci_image.sh
index 004eac0..831fd28 100755
--- a/scripts/ci/images/ci_verify_ci_image.sh
+++ b/scripts/ci/images/ci_verify_ci_image.sh
@@ -52,4 +52,6 @@ function pull_ci_image() {
 
 build_images::prepare_ci_build
 
+pull_ci_image
+
 verify_ci_image_dependencies
diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh
index 274261b..30f9def 100755
--- a/scripts/ci/images/ci_verify_prod_image.sh
+++ b/scripts/ci/images/ci_verify_prod_image.sh
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 # shellcheck source=scripts/ci/libraries/_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh"
+. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh"
 
 function verify_prod_image_has_airflow {
     echo
@@ -52,7 +52,6 @@ function verify_prod_image_has_airflow {
     fi
 }
 
-
 function verify_prod_image_dependencies {
 
     echo
@@ -70,7 +69,7 @@ function verify_prod_image_dependencies {
         # exit ${res}
     else
         echo
-        echo " \e[32mOK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.  ${COLOR_RESET}"
+        echo "${COLOR_GREEN_OK} The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent.  ${COLOR_RESET}"
         echo
     fi
     set -e
@@ -89,6 +88,7 @@ function pull_prod_image() {
 
 build_images::prepare_prod_build
 
+pull_prod_image
 
 verify_prod_image_has_airflow
 
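The replaced message relied on a raw "\e[32m" escape, which plain echo prints
literally rather than as a colour code; the shared COLOR_* variables avoid
that. A self-contained sketch of the convention (the real definitions live in
the CI libraries, so the values here are assumptions):

    COLOR_GREEN_OK=$'\e[32mOK.'
    COLOR_RESET=$'\e[0m'
    echo "${COLOR_GREEN_OK} The image dependencies are consistent. ${COLOR_RESET}"

The $'...' quoting makes bash translate \e into the actual escape byte, so
the message renders in green without needing echo -e.
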
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 296124f..d0e0213 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -117,10 +117,10 @@ function build_images::forget_last_answer() {
 }
 
 function build_images::confirm_via_terminal() {
-    echo > "${DETECTED_TERMINAL}"
-    echo > "${DETECTED_TERMINAL}"
-    echo "Make sure that you rebased to latest master before rebuilding!" > "${DETECTED_TERMINAL}"
-    echo > "${DETECTED_TERMINAL}"
+    echo >"${DETECTED_TERMINAL}"
+    echo >"${DETECTED_TERMINAL}"
+    echo "Make sure that you rebased to latest master before rebuilding!" >"${DETECTED_TERMINAL}"
+    echo >"${DETECTED_TERMINAL}"
     # Make sure to use output of tty rather than stdin/stdout when available - this way confirm
     # will work also in case of pre-commits (git does not pass stdin/stdout to pre-commit hooks)
     # shellcheck disable=SC2094
@@ -217,6 +217,31 @@ function build_images::confirm_image_rebuild() {
     fi
 }
 
+function build_images::confirm_non-empty-docker-context-files() {
+    local num_docker_context_files
+    num_docker_context_files=$(find "${AIRFLOW_SOURCES}/docker-context-files/" -type f | \
+        grep -c -v "README.md" || true)
+    if [[ ${num_docker_context_files} == "0" ]]; then
+        if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then
+            >&2 echo
+            >&2 echo "ERROR! You want to install packages from docker-context-files"
+            >&2 echo "       but there are no packages to install in this folder."
+            >&2 echo
+            exit 1
+        fi
+    else
+        if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "false" ]]; then
+            >&2 echo
+            >&2 echo "ERROR! There are some extra files in docker-context-files besides README.md"
+            >&2 echo "       and you did not choose the --install-from-docker-context-files flag."
+            >&2 echo "       This might result in unnecessary cache invalidation and long build times."
+            >&2 echo "       Exiting now - please remove those files (except README.md) and retry."
+            >&2 echo
+            exit 2
+        fi
+    fi
+}
+
 # Builds local image manifest
 # It contains only one .json file - result of docker inspect - describing the image
 # We cannot use docker registry APIs as they are available only with authorisation
@@ -251,8 +276,8 @@ function build_images::get_local_build_cache_hash() {
         return
     fi
     docker cp "local-airflow-ci-container:/build-cache-hash" \
-        "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" 2> /dev/null \
-        || touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}"
+        "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" 2>/dev/null ||
+        touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}"
     set -e
     verbosity::print_info
     verbosity::print_info "Local build cache hash: '$(cat "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}")'"
@@ -305,8 +330,8 @@ function build_images::compare_local_and_remote_build_cache_hash() {
     local local_hash
     local_hash=$(cat "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}")
 
-    if [[ ${remote_hash} != "${local_hash}" ||
-        ${local_hash} == "" ]]; then
+    if [[ ${remote_hash} != "${local_hash}" || ${local_hash} == "" ]] \
+        ; then
         echo
         echo
         echo "Your image and the dockerhub have different or missing build cache hashes."
@@ -370,7 +395,7 @@ function build_images::get_docker_image_names() {
     export BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"
 
     # GitHub Registry names must be lowercase :(
-    github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" |tr '[:upper:]' '[:lower:]')"
+    github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" | tr '[:upper:]' '[:lower:]')"
     export GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE="${GITHUB_REGISTRY}/${github_repository_lowercase}/${AIRFLOW_PROD_BASE_TAG}"
     export GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE="${GITHUB_REGISTRY}/${github_repository_lowercase}/${AIRFLOW_PROD_BASE_TAG}-build"
     export GITHUB_REGISTRY_PYTHON_BASE_IMAGE="${GITHUB_REGISTRY}/${github_repository_lowercase}/python:${PYTHON_BASE_IMAGE_VERSION}-slim-buster"
@@ -380,7 +405,7 @@ function build_images::get_docker_image_names() {
 }
 
 # If GitHub Registry is used, login to the registry using GITHUB_USERNAME and GITHUB_TOKEN
-function build_image::login_to_github_registry_if_needed()  {
+function build_image::login_to_github_registry_if_needed() {
     if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then
         if [[ -n ${GITHUB_TOKEN=} ]]; then
             echo "${GITHUB_TOKEN}" | docker login \
@@ -454,9 +479,8 @@ function build_images::rebuild_ci_image_if_needed() {
             echo "Checking if the remote image needs to be pulled"
             echo
             build_images::get_remote_image_build_cache_hash
-            if [[ ${REMOTE_DOCKER_REGISTRY_UNREACHABLE:=} != "true" && \
-                  ${LOCAL_MANIFEST_IMAGE_UNAVAILABLE:=} != "true" ]]; then
-                    build_images::compare_local_and_remote_build_cache_hash
+            if [[ ${REMOTE_DOCKER_REGISTRY_UNREACHABLE:=} != "true" && ${LOCAL_MANIFEST_IMAGE_UNAVAILABLE:=} != "true" ]]; then
+                build_images::compare_local_and_remote_build_cache_hash
             else
                 FORCE_PULL_IMAGES="true"
             fi
@@ -572,7 +596,7 @@ function build_images::build_ci_image() {
         )
     fi
 
-    if [[ -n ${SPIN_PID:=""} ]]; then
+    if [[ -n ${SPIN_PID=} ]]; then
         kill -HUP "${SPIN_PID}" || true
         wait "${SPIN_PID}" || true
         echo >"${DETECTED_TERMINAL}"
@@ -624,8 +648,8 @@ Docker building ${AIRFLOW_CI_IMAGE}.
         --build-arg ADDITIONAL_RUNTIME_APT_COMMAND="${ADDITIONAL_RUNTIME_APT_COMMAND}" \
         --build-arg ADDITIONAL_RUNTIME_APT_DEPS="${ADDITIONAL_RUNTIME_APT_DEPS}" \
         --build-arg ADDITIONAL_RUNTIME_APT_ENV="${ADDITIONAL_RUNTIME_APT_ENV}" \
-        --build-arg INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP}" \
-        --build-arg AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS}" \
+        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
+        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg UPGRADE_TO_LATEST_CONSTRAINTS="${UPGRADE_TO_LATEST_CONSTRAINTS}" \
         --build-arg BUILD_ID="${CI_BUILD_ID}" \
         --build-arg COMMIT_SHA="${COMMIT_SHA}" \
@@ -655,7 +679,7 @@ Docker building ${AIRFLOW_CI_IMAGE}.
 # DockerHub user etc. the variables are set so that other functions can use those variables.
 function build_images::prepare_prod_build() {
     if [[ -n "${INSTALL_AIRFLOW_REFERENCE=}" ]]; then
-        # When --install-airflow-reference is used then the image is build from github tag
+        # When --install-airflow-reference is used then the image is built from a GitHub tag
         EXTRA_DOCKER_PROD_BUILD_FLAGS=(
             "--build-arg" "AIRFLOW_INSTALLATION_METHOD=https://github.com/apache/airflow/archive/${INSTALL_AIRFLOW_REFERENCE}.tar.gz#egg=apache-airflow"
         )
@@ -758,8 +782,8 @@ function build_images::build_prod_images() {
         --build-arg ADDITIONAL_DEV_APT_DEPS="${ADDITIONAL_DEV_APT_DEPS}" \
         --build-arg ADDITIONAL_DEV_APT_ENV="${ADDITIONAL_DEV_APT_ENV}" \
         --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="${AIRFLOW_PRE_CACHED_PIP_PACKAGES}" \
-        --build-arg INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP}" \
-        --build-arg AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS}" \
+        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
+        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg BUILD_ID="${CI_BUILD_ID}" \
         --build-arg COMMIT_SHA="${COMMIT_SHA}" \
         "${DOCKER_CACHE_PROD_BUILD_DIRECTIVE[@]}" \
@@ -787,8 +811,8 @@ function build_images::build_prod_images() {
         --build-arg ADDITIONAL_RUNTIME_APT_DEPS="${ADDITIONAL_RUNTIME_APT_DEPS}" \
         --build-arg ADDITIONAL_RUNTIME_APT_ENV="${ADDITIONAL_RUNTIME_APT_ENV}" \
         --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="${AIRFLOW_PRE_CACHED_PIP_PACKAGES}" \
-        --build-arg INSTALL_AIRFLOW_VIA_PIP="${INSTALL_AIRFLOW_VIA_PIP}" \
-        --build-arg AIRFLOW_LOCAL_PIP_WHEELS="${AIRFLOW_LOCAL_PIP_WHEELS}" \
+        --build-arg INSTALL_FROM_PYPI="${INSTALL_FROM_PYPI}" \
+        --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \
         --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
         --build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \
         --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \
@@ -894,11 +918,6 @@ function build_images::build_prod_images_from_packages() {
     # Build apache airflow packages
     build_airflow_packages::build_airflow_packages
 
-    # Remove generated tar.gz packages
-    rm -f "${AIRFLOW_SOURCES}/dist/"apache-airflow*.tar.gz
-
-    # move the packages to docker-context-files folder
-    mkdir -pv "${AIRFLOW_SOURCES}/docker-context-files"
     mv "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/"
     build_images::build_prod_images
 }
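
The new confirm_non-empty-docker-context-files check hinges on one counting
idiom: find lists every file under docker-context-files, and grep -c -v
counts the lines that are not README.md. A transcript-style sketch (file
names illustrative):

    $ touch docker-context-files/README.md
    $ find docker-context-files/ -type f | grep -c -v "README.md"
    0
    $ touch docker-context-files/apache_airflow-1.10.14-py2.py3-none-any.whl
    $ find docker-context-files/ -type f | grep -c -v "README.md"
    1

A zero count combined with --install-from-docker-context-files means there is
nothing to install (exit 1); a non-zero count without the flag means stray
files would needlessly invalidate the Docker build cache (exit 2).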
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 19cacfe..e2410fd 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -215,7 +215,7 @@ function install_airflow_from_wheel() {
     local extras
     extras="${1}"
     local airflow_package
-    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache_airflow-*.whl')
+    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache_airflow-[0-9]*.whl')
     echo
     echo "Found package: ${airflow_package}. Installing."
     echo
@@ -228,6 +228,23 @@ function install_airflow_from_wheel() {
     pip install "${airflow_package}${1}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
 }
 
+function install_airflow_from_sdist() {
+    local extras
+    extras="${1}"
+    local airflow_package
+    airflow_package=$(find /dist/ -maxdepth 1 -type f -name 'apache-airflow-[0-9]*.tar.gz')
+    if [[ -z "${airflow_package}" ]]; then
+        >&2 echo
+        >&2 echo "ERROR! Could not find an airflow sdist package to install in /dist"
+        >&2 echo
+        exit 4
+    fi
+    echo
+    echo "Found package: ${airflow_package}. Installing."
+    echo
+    pip install "${airflow_package}${extras}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
+}
+
 function install_remaining_dependencies() {
     pip install apache-beam[gcp] >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1
 }
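
Because pip accepts extras appended to a local archive path - exactly what
the final pip install line exploits - a call such as
install_airflow_from_sdist "[all]" effectively runs something like the
following (version number illustrative):

    airflow_package="/dist/apache-airflow-1.10.14.tar.gz"
    pip install "${airflow_package}[all]"

The [0-9] in the find patterns pins the character right after the project
name to a digit, so backport provider packages, whose names share the
apache-airflow- prefix, are not picked up by mistake.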
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 7ef1e7a..55962aa 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -72,6 +72,9 @@ else
 fi
 
 if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
+    echo
+    echo "Using already installed airflow version"
+    echo
     if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www_rbac/node_modules" ]]; then
         echo
         echo "Installing node modules as they are not yet installed (Sources mounted from Host)"
@@ -84,7 +87,7 @@ if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
     if [[ ! -d "${AIRFLOW_SOURCES}/airflow/www_rbac/static/dist" ]]; then
         pushd "${AIRFLOW_SOURCES}/airflow/www_rbac/" &>/dev/null || exit 1
         echo
-        echo "Building production version of javascript files (Sources mounted from Host)"
+        echo "Building production version of JavaScript files (Sources mounted from Host)"
         echo
         echo
         yarn run prod
@@ -98,12 +101,64 @@ if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
     mkdir -p "${AIRFLOW_SOURCES}"/logs/
     mkdir -p "${AIRFLOW_SOURCES}"/tmp/
     export PYTHONPATH=${AIRFLOW_SOURCES}
+elif [[ ${INSTALL_AIRFLOW_VERSION} == "none"  ]]; then
+    echo
+    echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally"
+    echo
+    uninstall_airflow_and_providers
+elif [[ ${INSTALL_AIRFLOW_VERSION} == "wheel"  ]]; then
+    echo
+    echo "Install airflow from wheel package with [all] extras but uninstalling providers."
+    echo
+    uninstall_airflow_and_providers
+    install_airflow_from_wheel "[all]"
+    uninstall_providers
+elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist"  ]]; then
+    echo
+    echo "Install airflow from sdist package with [all] extras but uninstalling providers."
+    echo
+    uninstall_airflow_and_providers
+    install_airflow_from_sdist "[all]"
+    uninstall_providers
 else
-    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}"
+    echo
+    echo "Install airflow from PyPI including [all] extras"
+    echo
+    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[all]"
 fi
-
-if [[ ${INSTALL_WHEELS=} == "true" ]]; then
-  pip install /dist/*.whl || true
+if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then
+    echo
+    echo "Install all packages from dist folder"
+    if [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" ]]; then
+        echo "(except apache-airflow)"
+    fi
+    if [[ ${PACKAGE_FORMAT} == "both" ]]; then
+        echo
+        echo "${COLOR_RED_ERROR}You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
+        echo
+        exit 1
+    fi
+    echo
+    installable_files=()
+    for file in /dist/*.{whl,tar.gz}
+    do
+        if [[ ${INSTALL_AIRFLOW_VERSION} == "wheel" && ${file} == /dist/apache?airflow-[0-9]* ]]; then
+            # Skip Apache Airflow package - it's just been installed above with extras
+            echo "Skipping ${file}"
+            continue
+        fi
+        if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *".whl" ]]; then
+            echo "Adding ${file} to install"
+            installable_files+=( "${file}" )
+        fi
+        if [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *".tar.gz" ]]; then
+            echo "Adding ${file} to install"
+            installable_files+=( "${file}" )
+        fi
+    done
+    if (( ${#installable_files[@]} )); then
+        pip install "${installable_files[@]}" --no-deps
+    fi
 fi
 
 export RUN_AIRFLOW_1_10=${RUN_AIRFLOW_1_10:="false"}
@@ -130,24 +185,6 @@ if [[ ${ENVIRONMENT_EXIT_CODE} != 0 ]]; then
     exit ${ENVIRONMENT_EXIT_CODE}
 fi
 
-
-if [[ ${INTEGRATION_KERBEROS:="false"} == "true" ]]; then
-    set +e
-    setup_kerberos
-    RES=$?
-    set -e
-
-    if [[ ${RES} != 0 ]]; then
-        echo
-        echo "ERROR !!!!Kerberos initialisation requested, but failed"
-        echo
-        echo "I will exit now, and you need to run 'breeze --integration kerberos restart'"
-        echo "to re-enter breeze and restart kerberos."
-        echo
-        exit 1
-    fi
-fi
-
 # Create symbolic link to fix possible issues with kubectl config cmd-path
 mkdir -p /usr/lib/google-cloud-sdk/bin
 touch /usr/lib/google-cloud-sdk/bin/gcloud
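
One caveat in the /dist selection loop above: with bash's default globbing an
unmatched pattern stays in place, so with an empty /dist the loop iterates
over the literal strings /dist/*.whl and /dist/*.tar.gz, and a literal that
happens to end in the selected suffix would reach pip. A defensive variant
under nullglob (a sketch, not part of this commit; the apache-airflow skip
branch is omitted for brevity):

    shopt -s nullglob                  # unmatched globs expand to nothing
    installable_files=()
    for file in /dist/*.whl /dist/*.tar.gz; do
        if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *.whl ]] ||
            [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *.tar.gz ]]; then
            installable_files+=("${file}")
        fi
    done
    shopt -u nullglob
    if ((${#installable_files[@]})); then
        pip install "${installable_files[@]}" --no-deps
    fi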