Posted to commits@airflow.apache.org by po...@apache.org on 2022/04/03 20:35:35 UTC

[airflow] branch main updated: Prepare Breeze2 for prime time :) (#22713)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 4ffd4f0953 Prepare Breeze2 for prime time :) (#22713)
4ffd4f0953 is described below

commit 4ffd4f09532fceb67675fce4c1f5cd383eff992e
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Sun Apr 3 22:35:26 2022 +0200

    Prepare Breeze2 for prime time :) (#22713)
    
    This is a review and clean-up of all the parameters and
    commands of Breeze2 in order to prepare it for being
    used by contributors.
    
    There are various small fixes here and there, removal
    of duplicated code, refactoring and moving code around,
    as well as a cleanup and review of the parameters used
    by all implemented commands.
    
    The parameters, default values and their behaviours were
    updated to match the "new" life of Breeze rather than
    the old one.
    
    Some improvements were made to the autocomplete and to
    the click help messages printed. The full list of choices
    is always displayed, parameters are grouped according to
    their target audience, and they are sorted according
    to importance and frequency of use.
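
    For illustration, a minimal sketch of the grouping mechanism
    (rich-click's OPTION_GROUPS, as used in this change; the command
    and option names below are made up, not the real Breeze2 ones):

        import rich_click as click

        # Options are listed under named headings in --help output,
        # ordered by expected frequency of use.
        click.rich_click.OPTION_GROUPS = {
            "mycli": [
                {"name": "Basic flags", "options": ["--python", "--backend"]},
                {"name": "Advanced flags", "options": ["--force-build"]},
            ]
        }

        @click.command()
        @click.option('--python')
        @click.option('--backend')
        @click.option('--force-build', is_flag=True)
        def mycli(python, backend, force_build):
            """Example command with grouped --help output."""

        if __name__ == "__main__":
            mycli()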
    
    Various messages have been colourised according to their
    meaning: warnings in yellow, errors in red and
    informational messages in bright_blue.
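
    A minimal sketch of the convention, using the rich Console that
    Breeze2 already depends on (the message texts are made up):

        from rich.console import Console

        console = Console(color_system="standard")
        console.print("[bright_blue]Building the CI image.[/]")    # info
        console.print("[bright_yellow]Image is outdated.[/]")      # warning
        console.print("[red]ERROR: docker is not running.[/]")     # error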
    
    The `--dry-run` option has been added: it shows what
    would have been run without actually running the
    potentially "write" commands (read commands are still
    executed), so that you can easily verify the commands
    and manually copy, modify and execute them yourself.
    The `dry_run` and `verbose` options are now used by
    all commands.
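
    A sketch of the pattern (a hypothetical helper; the real logic
    lives in airflow_breeze.utils.run_utils and its exact signature
    may differ):

        import subprocess
        from typing import List

        def run_command(cmd: List[str], *, dry_run: bool, verbose: bool):
            """Print the command; skip execution when --dry-run is set."""
            if verbose or dry_run:
                print("Running:", " ".join(cmd))
            if dry_run:
                return None  # a "write" command is only shown, not executed
            return subprocess.run(cmd, check=True)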
    
    The "main" command now runs "shell" by default similarly
    as the original Breeze.
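
    A self-contained sketch of the click mechanism used for this
    (simplified; the real "main" also carries all the shell options):

        import click

        @click.group(invoke_without_command=True)
        @click.pass_context
        def main(ctx: click.Context):
            # With invoke_without_command=True this body runs even when
            # no subcommand was given, so we can fall through to "shell".
            if not ctx.invoked_subcommand:
                ctx.forward(shell)

        @main.command()
        def shell():
            click.echo("entering the Breeze shell")

        if __name__ == "__main__":
            main()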
    
    All "shortcut" parameters have been standardized - i.e
    common options (verbose/dry run/help) have one and all
    common flags that are likely to be used often have an
    assigned shortcute.
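
    A sketch of how such shared options are declared once and reused
    across commands (mirroring the option_verbose/option_dry_run
    pattern in this change):

        import click

        # Defined once, applied as a decorator to every command.
        option_verbose = click.option(
            '-v', '--verbose', is_flag=True, envvar='VERBOSE',
            help='Print verbose information about performed steps.',
        )

        @click.command()
        @option_verbose
        def build(verbose: bool):
            click.echo(f"verbose={verbose}")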
    
    The "stop" and "cleanup" command have been added
    as they are necessary for average user to complete the
    regular usage cycle.
    
    Documentation for all the important methods has been
    updated.
---
 .github/workflows/build-images.yml                 |   2 +-
 .github/workflows/ci.yml                           |   2 +-
 .pre-commit-config.yaml                            |   2 +-
 BREEZE.rst                                         |  21 -
 CONTRIBUTING.rst                                   |   2 +-
 Dockerfile.ci                                      |  50 +-
 airflow/www/ask_for_recompile_assets_if_needed.sh  |   2 +-
 breeze                                             |  12 -
 breeze-complete                                    |   2 +-
 dev/breeze/setup.cfg                               |   7 +-
 dev/breeze/src/airflow_breeze/branch_defaults.py   |  21 +
 dev/breeze/src/airflow_breeze/breeze.py            | 947 ++++++++++++++++-----
 .../{docs_generator => build_image}/__init__.py    |   1 +
 .../{ => build_image}/ci/__init__.py               |   1 +
 .../build_image/ci/build_ci_image.py               | 122 +++
 .../ci/build_ci_params.py}                         |  95 +--
 .../{ => build_image}/prod/__init__.py             |   1 +
 .../build_image/prod/build_prod_image.py           | 173 ++++
 .../prod/build_prod_params.py}                     | 141 +--
 dev/breeze/src/airflow_breeze/cache.py             | 112 ---
 dev/breeze/src/airflow_breeze/ci/build_image.py    | 124 ---
 dev/breeze/src/airflow_breeze/console.py           |  21 -
 .../docs_generator/build_documentation.py          |  39 -
 dev/breeze/src/airflow_breeze/global_constants.py  | 110 +--
 .../src/airflow_breeze/prod/build_prod_image.py    | 200 -----
 dev/breeze/src/airflow_breeze/shell/__init__.py    |   1 +
 dev/breeze/src/airflow_breeze/shell/enter_shell.py | 313 +++----
 .../shell/{shell_builder.py => shell_params.py}    | 110 ++-
 dev/breeze/src/airflow_breeze/utils/__init__.py    |   1 +
 dev/breeze/src/airflow_breeze/utils/cache.py       | 141 +++
 .../doc_builder.py => utils/console.py}            |  33 +-
 .../airflow_breeze/utils/docker_command_utils.py   | 314 ++++++-
 .../src/airflow_breeze/utils/host_info_utils.py    |  15 +-
 .../src/airflow_breeze/utils/md5_build_check.py    | 116 +++
 dev/breeze/src/airflow_breeze/utils/path_utils.py  |  71 +-
 dev/breeze/src/airflow_breeze/utils/registry.py    |  56 ++
 dev/breeze/src/airflow_breeze/utils/run_utils.py   | 293 ++++---
 dev/breeze/src/airflow_breeze/utils/visuals.py     | 144 ++++
 dev/breeze/src/airflow_breeze/visuals/__init__.py  | 103 ---
 .../src/airflow_ci/find_newer_dependencies.py      |  25 +-
 dev/breeze/src/airflow_ci/freespace.py             |  35 +-
 dev/breeze/tests/test_build_image.py               |  22 +-
 dev/breeze/tests/test_cache.py                     |  12 +-
 dev/breeze/tests/test_commands.py                  |  18 +-
 dev/breeze/tests/test_docker_command_utils.py      |  25 +-
 dev/breeze/tests/test_find_airflow_directory.py    |  18 +-
 dev/breeze/tests/test_prod_image.py                |  73 +-
 dev/retag_docker_images.py                         |   6 +-
 scripts/ci/docker-compose/_docker.env              |   1 -
 scripts/ci/docker-compose/base.yml                 |   1 -
 scripts/ci/libraries/_build_images.sh              |  14 +-
 scripts/docker/entrypoint_ci.sh                    |  25 +-
 scripts/in_container/_in_container_utils.sh        |  12 +-
 scripts/in_container/check_environment.sh          |  23 +-
 scripts/in_container/configure_environment.sh      |  18 +-
 scripts/in_container/run_ci_tests.sh               |   4 +-
 scripts/in_container/run_docs_build.sh             |   2 +-
 scripts/in_container/run_init_script.sh            |   5 -
 .../run_install_and_test_provider_packages.sh      |   2 +-
 .../run_prepare_provider_documentation.sh          |   4 +-
 scripts/in_container/run_system_tests.sh           |   4 +-
 61 files changed, 2501 insertions(+), 1769 deletions(-)

diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index 1d41133732..81af305346 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -229,7 +229,7 @@ jobs:
       - name: "Free space"
         run: airflow-freespace
       - name: "Build CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
-        run: Breeze2 build-ci-image
+        run: Breeze2 build-image
       - name: "Push CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_push_ci_images.sh
       - name: >
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3ec3dab163..a532faa65c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -310,7 +310,7 @@ jobs:
         run: airflow-freespace
         if: needs.build-info.outputs.inWorkflowBuild == 'true'
       - name: "Build CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
-        run: Breeze2 build-ci-image
+        run: Breeze2 build-image
         if: needs.build-info.outputs.inWorkflowBuild == 'true'
       - name: "Push CI image ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_push_ci_images.sh
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 03f5076161..6cbaec4d52 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,7 +19,7 @@ default_stages: [commit, push]
 default_language_version:
   # force all unspecified python hooks to run python3
   python: python3
-minimum_pre_commit_version: "1.20.0"
+minimum_pre_commit_version: "2.0.0"
 repos:
   - repo: meta
     hooks:
diff --git a/BREEZE.rst b/BREEZE.rst
index 4b649db11d..00a7286e50 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1336,13 +1336,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --image-tag TAG
           Additional tag in the image.
 
-  --skip-installing-airflow-providers-from-sources
-          By default 'pip install' in Airflow 2.0 installs only the provider packages that
-          are needed by the extras. When you build image during the development (which is
-          default in Breeze) all providers are installed by default from sources.
-          You can disable it by adding this flag but then you have to install providers from
-          wheel packages via --use-packages-from-dist flag.
-
   --disable-pypi-when-building
           Disable installing Airflow from pypi when building. If you use this flag and want
           to install Airflow, you have to install it from packages placed in
@@ -2031,13 +2024,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --image-tag TAG
           Additional tag in the image.
 
-  --skip-installing-airflow-providers-from-sources
-          By default 'pip install' in Airflow 2.0 installs only the provider packages that
-          are needed by the extras. When you build image during the development (which is
-          default in Breeze) all providers are installed by default from sources.
-          You can disable it by adding this flag but then you have to install providers from
-          wheel packages via --use-packages-from-dist flag.
-
   --disable-pypi-when-building
           Disable installing Airflow from pypi when building. If you use this flag and want
           to install Airflow, you have to install it from packages placed in
@@ -2632,13 +2618,6 @@ This is the current syntax for  `./breeze <./breeze>`_:
   --image-tag TAG
           Additional tag in the image.
 
-  --skip-installing-airflow-providers-from-sources
-          By default 'pip install' in Airflow 2.0 installs only the provider packages that
-          are needed by the extras. When you build image during the development (which is
-          default in Breeze) all providers are installed by default from sources.
-          You can disable it by adding this flag but then you have to install providers from
-          wheel packages via --use-packages-from-dist flag.
-
   --disable-pypi-when-building
           Disable installing Airflow from pypi when building. If you use this flag and want
           to install Airflow, you have to install it from packages placed in
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 2cb7e9db28..f020066f67 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -644,7 +644,7 @@ and not packaged together with the core, unless you set ``INSTALL_PROVIDERS_FROM
 variable to ``true``.
 
 In Breeze - which is a development environment, ``INSTALL_PROVIDERS_FROM_SOURCES`` variable is set to true,
-but you can add ``--skip-installing-airflow-providers-from-sources`` flag to Breeze to skip installing providers when
+but you can add ``--install-providers-from-sources=true`` flag to Breeze to skip installing providers when
 building the images.
 
 One watch-out - providers are still always installed (or rather available) if you install airflow from
diff --git a/Dockerfile.ci b/Dockerfile.ci
index d386f38ff1..135b5a2a53 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -640,10 +640,11 @@ export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
 if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
 
     echo
-    echo "Airflow home: ${AIRFLOW_HOME}"
-    echo "Airflow sources: ${AIRFLOW_SOURCES}"
-    echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
-
+    echo "${COLOR_BLUE}Running Initialization. Your basic configuration is:${COLOR_RESET}"
+    echo
+    echo "  * ${COLOR_BLUE}Airflow home:${COLOR_RESET} ${AIRFLOW_HOME}"
+    echo "  * ${COLOR_BLUE}Airflow sources:${COLOR_RESET} ${AIRFLOW_SOURCES}"
+    echo "  * ${COLOR_BLUE}Airflow core SQL connection:${COLOR_RESET} ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
     echo
 
     RUN_TESTS=${RUN_TESTS:="false"}
@@ -653,7 +654,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
     if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then
         export PYTHONPATH=${AIRFLOW_SOURCES}
         echo
-        echo "Using already installed airflow version"
+        echo "${COLOR_BLUE}Using airflow version from current sources${COLOR_RESET}"
         echo
         if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then
             pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null
@@ -667,38 +668,38 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
         mkdir -p "${AIRFLOW_SOURCES}"/tmp/
     elif [[ ${USE_AIRFLOW_VERSION} == "none"  ]]; then
         echo
-        echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally"
+        echo "${COLOR_BLUE}Skip installing airflow - only install wheel/tar.gz packages that are present locally.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
     elif [[ ${USE_AIRFLOW_VERSION} == "wheel"  ]]; then
         echo
-        echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
+        echo "${COLOR_BLUE}Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
         install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]"
         uninstall_providers
     elif [[ ${USE_AIRFLOW_VERSION} == "sdist"  ]]; then
         echo
-        echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
+        echo "${COLOR_BLUE}Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
         install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]"
         uninstall_providers
     else
         echo
-        echo "Install airflow from PyPI without extras"
+        echo "${COLOR_BLUE}Install airflow from PyPI without extras"
         echo
         install_released_airflow_version "${USE_AIRFLOW_VERSION}"
     fi
     if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then
         echo
-        echo "Install all packages from dist folder"
+        echo "${COLOR_BLUE}Install all packages from dist folder"
         if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then
             echo "(except apache-airflow)"
         fi
         if [[ ${PACKAGE_FORMAT} == "both" ]]; then
             echo
-            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
+            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'.${COLOR_RESET}"
             echo
             exit 1
         fi
@@ -782,7 +783,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
 
     cd "${AIRFLOW_SOURCES}"
 
-    if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
+    if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then
         export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
         export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
         # shellcheck source=scripts/in_container/bin/run_tmux
@@ -998,10 +999,11 @@ export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
 if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
 
     echo
-    echo "Airflow home: ${AIRFLOW_HOME}"
-    echo "Airflow sources: ${AIRFLOW_SOURCES}"
-    echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
-
+    echo "${COLOR_BLUE}Running Initialization. Your basic configuration is:${COLOR_RESET}"
+    echo
+    echo "  * ${COLOR_BLUE}Airflow home:${COLOR_RESET} ${AIRFLOW_HOME}"
+    echo "  * ${COLOR_BLUE}Airflow sources:${COLOR_RESET} ${AIRFLOW_SOURCES}"
+    echo "  * ${COLOR_BLUE}Airflow core SQL connection:${COLOR_RESET} ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
     echo
 
     RUN_TESTS=${RUN_TESTS:="false"}
@@ -1011,7 +1013,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
     if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then
         export PYTHONPATH=${AIRFLOW_SOURCES}
         echo
-        echo "Using already installed airflow version"
+        echo "${COLOR_BLUE}Using airflow version from current sources${COLOR_RESET}"
         echo
         if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then
             pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null
@@ -1025,38 +1027,38 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
         mkdir -p "${AIRFLOW_SOURCES}"/tmp/
     elif [[ ${USE_AIRFLOW_VERSION} == "none"  ]]; then
         echo
-        echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally"
+        echo "${COLOR_BLUE}Skip installing airflow - only install wheel/tar.gz packages that are present locally.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
     elif [[ ${USE_AIRFLOW_VERSION} == "wheel"  ]]; then
         echo
-        echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
+        echo "${COLOR_BLUE}Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
         install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]"
         uninstall_providers
     elif [[ ${USE_AIRFLOW_VERSION} == "sdist"  ]]; then
         echo
-        echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
+        echo "${COLOR_BLUE}Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
         install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]"
         uninstall_providers
     else
         echo
-        echo "Install airflow from PyPI without extras"
+        echo "${COLOR_BLUE}Install airflow from PyPI without extras"
         echo
         install_released_airflow_version "${USE_AIRFLOW_VERSION}"
     fi
     if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then
         echo
-        echo "Install all packages from dist folder"
+        echo "${COLOR_BLUE}Install all packages from dist folder"
         if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then
             echo "(except apache-airflow)"
         fi
         if [[ ${PACKAGE_FORMAT} == "both" ]]; then
             echo
-            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
+            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'.${COLOR_RESET}"
             echo
             exit 1
         fi
@@ -1140,7 +1142,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
 
     cd "${AIRFLOW_SOURCES}"
 
-    if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
+    if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then
         export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
         export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
         # shellcheck source=scripts/in_container/bin/run_tmux
diff --git a/airflow/www/ask_for_recompile_assets_if_needed.sh b/airflow/www/ask_for_recompile_assets_if_needed.sh
index d1a6f34cbd..a50502e777 100755
--- a/airflow/www/ask_for_recompile_assets_if_needed.sh
+++ b/airflow/www/ask_for_recompile_assets_if_needed.sh
@@ -30,7 +30,7 @@ NO_COLOR='\033[0m'
 md5sum=$(find package.json yarn.lock static/css static/js -type f | sort | xargs md5sum)
 old_md5sum=$(cat "${MD5SUM_FILE}" 2>/dev/null || true)
 if [[ ${old_md5sum} != "${md5sum}" ]]; then
-    if [[ ${START_AIRFLOW:="false"} == "true" && ${USE_AIRFLOW_VERSION:=} == "" ]]; then
+    if [[ ( ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" )  && ${USE_AIRFLOW_VERSION:=} == "" ]]; then
         echo
         echo -e "${YELLOW}Recompiling assets as they have changed and you need them for 'start_airflow' command${NO_COLOR}"
         echo
diff --git a/breeze b/breeze
index d04acd87a7..27ef65cb8d 100755
--- a/breeze
+++ b/breeze
@@ -1047,11 +1047,6 @@ function breeze::parse_arguments() {
             echo "Extras : ${AIRFLOW_EXTRAS}"
             shift 2
             ;;
-        --skip-installing-airflow-providers-from-sources)
-            export INSTALL_PROVIDERS_FROM_SOURCES="false"
-            echo "Install all Airflow Providers: false"
-            shift
-            ;;
         --additional-extras)
             export ADDITIONAL_AIRFLOW_EXTRAS="${2}"
             echo "Additional extras : ${ADDITIONAL_AIRFLOW_EXTRAS}"
@@ -2700,13 +2695,6 @@ ${FORMATTED_DEFAULT_PROD_EXTRAS}
 --image-tag TAG
         Additional tag in the image.
 
---skip-installing-airflow-providers-from-sources
-        By default 'pip install' in Airflow 2.0 installs only the provider packages that
-        are needed by the extras. When you build image during the development (which is
-        default in Breeze) all providers are installed by default from sources.
-        You can disable it by adding this flag but then you have to install providers from
-        wheel packages via --use-packages-from-dist flag.
-
 --disable-pypi-when-building
         Disable installing Airflow from pypi when building. If you use this flag and want
         to install Airflow, you have to install it from packages placed in
diff --git a/breeze-complete b/breeze-complete
index b6577aae06..ec26c5c9d0 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -194,7 +194,7 @@ postgres-version: mysql-version: mssql-version:
 version-suffix-for-pypi: version-suffix-for-svn:
 additional-extras: additional-python-deps: additional-dev-deps: additional-runtime-deps: image-tag:
 disable-mysql-client-installation disable-mssql-client-installation constraints-location: disable-pip-cache install-from-docker-context-files
-additional-extras: additional-python-deps: disable-pypi-when-building skip-installing-airflow-providers-from-sources
+additional-extras: additional-python-deps: disable-pypi-when-building
 dev-apt-deps: additional-dev-apt-deps: dev-apt-command: additional-dev-apt-command: additional-dev-apt-env:
 runtime-apt-deps: additional-runtime-apt-deps: runtime-apt-command: additional-runtime-apt-command: additional-runtime-apt-env:
 load-default-connections load-example-dags
diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg
index 3408a5523b..0e5ab1b9cf 100644
--- a/dev/breeze/setup.cfg
+++ b/dev/breeze/setup.cfg
@@ -54,14 +54,15 @@ package_dir=
 packages = find:
 install_requires =
     click
+    inputimeout
     pendulum
+    psutil
     pytest
     pytest-xdist
+    pyyaml
+    requests
     rich
     rich_click
-    requests
-    psutil
-    inputimeout
 
 [options.packages.find]
 where=src
diff --git a/dev/breeze/src/airflow_breeze/branch_defaults.py b/dev/breeze/src/airflow_breeze/branch_defaults.py
index 153981c0d2..f4903cd077 100644
--- a/dev/breeze/src/airflow_breeze/branch_defaults.py
+++ b/dev/breeze/src/airflow_breeze/branch_defaults.py
@@ -14,6 +14,27 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""
+Default configuration for this branch. The two variables below
+should be the only ones that need to be changed when we branch
+off a different airflow branch.
+
+This file is different in every branch (`main`, `vX_Y_test`) of airflow.
+The _stable branches have the same values as the _test branches.
+
+Examples:
+
+    main:
+
+        AIRFLOW_BRANCH = "main"
+        DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "constraints-main"
+
+    v2-2-test:
+
+        AIRFLOW_BRANCH = "v2-2-test"
+        DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "constraints-2-2"
+
+"""
 
 AIRFLOW_BRANCH = "main"
 DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH = "constraints-main"
diff --git a/dev/breeze/src/airflow_breeze/breeze.py b/dev/breeze/src/airflow_breeze/breeze.py
index e6ea13fcd4..b0947475f5 100755
--- a/dev/breeze/src/airflow_breeze/breeze.py
+++ b/dev/breeze/src/airflow_breeze/breeze.py
@@ -19,77 +19,414 @@ import os
 import shutil
 import subprocess
 import sys
+from dataclasses import dataclass
 from pathlib import Path
-from typing import Optional, Tuple
+from typing import List, Optional, Tuple
+
+from airflow_breeze.shell.shell_params import ShellParams
 
 try:
+    # We handle ImportError so that click autocomplete works
     import rich_click as click
+
+    click.rich_click.SHOW_METAVARS_COLUMN = False
+    click.rich_click.APPEND_METAVARS_HELP = True
+    click.rich_click.STYLE_ERRORS_SUGGESTION = "bright_blue italic"
+    click.rich_click.ERRORS_SUGGESTION = "\nTry running the '--help' flag for more information.\n"
+    click.rich_click.ERRORS_EPILOGUE = (
+        "\nTo find out more, visit [bright_blue]https://github.com/apache/airflow/blob/main/BREEZE.rst[/]\n"
+    )
+    click.rich_click.OPTION_GROUPS = {
+        "Breeze2": [
+            {
+                "name": "Basic flags for the default (shell) command",
+                "options": [
+                    "--python",
+                    "--backend",
+                    "--use-airflow-version",
+                    "--postgres-version",
+                    "--mysql-version",
+                    "--mssql-version",
+                    "--forward-credentials",
+                    "--db-reset",
+                ],
+            },
+            {
+                "name": "Advanced flags for the default (shell) command",
+                "options": [
+                    "--force-build",
+                    "--mount-sources",
+                    "--integration",
+                ],
+            },
+        ],
+        "Breeze2 shell": [
+            {
+                "name": "Basic flags",
+                "options": [
+                    "--python",
+                    "--backend",
+                    "--use-airflow-version",
+                    "--postgres-version",
+                    "--mysql-version",
+                    "--mssql-version",
+                    "--forward-credentials",
+                    "--db-reset",
+                ],
+            },
+            {
+                "name": "Advanced flag for running",
+                "options": [
+                    "--force-build",
+                    "--mount-sources",
+                    "--integration",
+                ],
+            },
+        ],
+        "Breeze2 start-airflow": [
+            {
+                "name": "Basic flags",
+                "options": [
+                    "--python",
+                    "--backend",
+                    "--use-airflow-version",
+                    "--postgres-version",
+                    "--mysql-version",
+                    "--mssql-version",
+                    "--load-example-dags",
+                    "--load-default-connections",
+                    "--forward-credentials",
+                    "--db-reset",
+                ],
+            },
+            {
+                "name": "Advanced flag for running",
+                "options": [
+                    "--force-build",
+                    "--mount-sources",
+                    "--integration",
+                ],
+            },
+        ],
+        "Breeze2 build-image": [
+            {
+                "name": "Basic usage",
+                "options": [
+                    "--python",
+                    "--upgrade-to-newer-dependencies",
+                    "--debian-version",
+                    "--image-tag",
+                    "--docker-cache",
+                    "--github-repository",
+                ],
+            },
+            {
+                "name": "Advanced options (for power users)",
+                "options": [
+                    "--install-providers-from-sources",
+                    "--additional-extras",
+                    "--additional-dev-apt-deps",
+                    "--additional-runtime-apt-deps",
+                    "--additional-python-deps",
+                    "--additional-dev-apt-command",
+                    "--runtime-apt-command",
+                    "--additional-dev-apt-env",
+                    "--additional-runtime-apt-env",
+                    "--additional-runtime-apt-command",
+                    "--dev-apt-command",
+                    "--dev-apt-deps",
+                    "--runtime-apt-deps",
+                ],
+            },
+            {
+                "name": "Preparing cache (for maintainers)",
+                "options": [
+                    "--platform",
+                    "--prepare-buildx-cache",
+                ],
+            },
+        ],
+        "Breeze2 build-prod-image": [
+            {
+                "name": "Basic usage",
+                "options": [
+                    "--python",
+                    "--install-airflow-version",
+                    "--upgrade-to-newer-dependencies",
+                    "--debian-version",
+                    "--image-tag",
+                    "--docker-cache",
+                    "--github-repository",
+                ],
+            },
+            {
+                "name": "Options for customizing images",
+                "options": [
+                    "--install-providers-from-sources",
+                    "--extras",
+                    "--additional-extras",
+                    "--additional-dev-apt-deps",
+                    "--additional-runtime-apt-deps",
+                    "--additional-python-deps",
+                    "--additional-dev-apt-command",
+                    "--runtime-apt-command",
+                    "--additional-dev-apt-env",
+                    "--additional-runtime-apt-env",
+                    "--additional-runtime-apt-command",
+                    "--dev-apt-command",
+                    "--dev-apt-deps",
+                    "--runtime-apt-deps",
+                ],
+            },
+            {
+                "name": "Customization options (for specific customization needs)",
+                "options": [
+                    "--install-from-docker-context-files",
+                    "--cleanup-docker-context-files",
+                    "--disable-mysql-client-installation",
+                    "--disable-mssql-client-installation",
+                    "--disable-postgres-client-installation",
+                    "--disable-airflow-repo-cache",
+                    "--disable-pypi",
+                    "--install-airflow-reference",
+                    "--installation-method",
+                ],
+            },
+            {
+                "name": "Preparing cache (for maintainers)",
+                "options": [
+                    "--platform",
+                    "--prepare-buildx-cache",
+                ],
+            },
+        ],
+        "Breeze2 static-check": [
+            {
+                "name": "Pre-commit flags",
+                "options": [
+                    "--type",
+                    "--files",
+                    "--all-files",
+                    "--show-diff-on-failure",
+                    "--last-commit",
+                ],
+            },
+        ],
+        "Breeze2 build-docs": [
+            {
+                "name": "Doc flags",
+                "options": [
+                    "--docs-only",
+                    "--spellcheck-only",
+                    "--package-filter",
+                ],
+            },
+        ],
+        "Breeze2 stop": [
+            {
+                "name": "Stop flags",
+                "options": [
+                    "--preserve-volumes",
+                ],
+            },
+        ],
+        "Breeze2 setup-autocomplete": [
+            {
+                "name": "Setup autocomplete flags",
+                "options": [
+                    "--force-setup",
+                ],
+            },
+        ],
+        "Breeze2 config": [
+            {
+                "name": "Config flags",
+                "options": [
+                    "--python",
+                    "--backend",
+                    "--cheatsheet",
+                    "--asciiart",
+                ],
+            },
+        ],
+    }
+
+    click.rich_click.COMMAND_GROUPS = {
+        "Breeze2": [
+            {
+                "name": "Developer tools",
+                "commands": [
+                    "shell",
+                    "start-airflow",
+                    "stop",
+                    "build-image",
+                    "build-prod-image",
+                    "build-docs",
+                    "static-check",
+                ],
+            },
+            {
+                "name": "Configuration & maintenance",
+                "commands": ["cleanup", "setup-autocomplete", "config", "version"],
+            },
+        ]
+    }
+
+
 except ImportError:
-    # We handle import errors so that click autocomplete works
     import click  # type: ignore[no-redef]
 
-from click import ClickException
+from click import Context
 
-from airflow_breeze.cache import delete_cache, touch_cache_file, write_to_cache_file
-from airflow_breeze.ci.build_image import build_image
-from airflow_breeze.ci.build_params import BuildParams
-from airflow_breeze.console import console
-from airflow_breeze.docs_generator import build_documentation
-from airflow_breeze.docs_generator.doc_builder import DocBuilder
+from airflow_breeze.build_image.ci.build_ci_image import build_image
+from airflow_breeze.build_image.ci.build_ci_params import BuildCiParams
+from airflow_breeze.build_image.prod.build_prod_image import build_production_image
 from airflow_breeze.global_constants import (
     ALLOWED_BACKENDS,
+    ALLOWED_BUILD_CACHE,
     ALLOWED_DEBIAN_VERSIONS,
     ALLOWED_EXECUTORS,
-    ALLOWED_INSTALL_AIRFLOW_VERSIONS,
+    ALLOWED_INSTALLATION_METHODS,
     ALLOWED_INTEGRATIONS,
+    ALLOWED_MOUNT_OPTIONS,
     ALLOWED_MSSQL_VERSIONS,
     ALLOWED_MYSQL_VERSIONS,
+    ALLOWED_PLATFORMS,
     ALLOWED_POSTGRES_VERSIONS,
     ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS,
-    ALLOWED_USE_AIRFLOW_VERSIONS,
+    MOUNT_SELECTED,
     get_available_packages,
 )
 from airflow_breeze.pre_commit_ids import PRE_COMMIT_LIST
-from airflow_breeze.prod.build_prod_image import build_production_image
-from airflow_breeze.shell.enter_shell import build_shell
-from airflow_breeze.utils.docker_command_utils import check_docker_resources
+from airflow_breeze.shell.enter_shell import enter_shell
+from airflow_breeze.utils.cache import delete_cache, touch_cache_file, write_to_cache_file
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.docker_command_utils import (
+    check_docker_resources,
+    construct_env_variables_docker_compose_command,
+    get_extra_docker_flags,
+)
 from airflow_breeze.utils.path_utils import (
-    __AIRFLOW_SOURCES_ROOT,
+    AIRFLOW_SOURCES_ROOT,
+    BUILD_CACHE_DIR,
     create_directories,
     find_airflow_sources_root,
-    get_airflow_sources_root,
 )
-from airflow_breeze.utils.run_utils import check_package_installed, run_command
-from airflow_breeze.visuals import ASCIIART, ASCIIART_STYLE
-
-AIRFLOW_SOURCES_DIR = Path(__file__).resolve().parent.parent.parent.parent.parent.absolute()
+from airflow_breeze.utils.run_utils import check_pre_commit_installed, run_command
+from airflow_breeze.utils.visuals import ASCIIART, ASCIIART_STYLE
 
 NAME = "Breeze2"
 VERSION = "0.0.1"
 
-
-@click.group()
-def main():
-    find_airflow_sources_root()
-
+find_airflow_sources_root()
 
 option_verbose = click.option(
-    "-v", "--verbose", is_flag=True, help="Print verbose information about performed steps", envvar='VERBOSE'
+    "-v", "--verbose", is_flag=True, help="Print verbose information about performed steps.", envvar='VERBOSE'
 )
 
-option_python_version = click.option(
+option_dry_run = click.option(
+    "-D",
+    "--dry-run",
+    is_flag=True,
+    help="If dry-run is set, commands are only printed, not executed.",
+    envvar='DRY_RUN',
+)
+
+
+option_python = click.option(
     '-p',
     '--python',
     type=click.Choice(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS),
-    help='Choose your python version',
+    help='Python version to use.',
     envvar='PYTHON_MAJOR_MINOR_VERSION',
 )
 
 option_backend = click.option(
     '-b',
     '--backend',
+    help="Database backend to use.",
     type=click.Choice(ALLOWED_BACKENDS),
-    help='Choose your backend database',
+)
+
+option_integration = click.option(
+    '--integration',
+    help="Integration(s) to enable when running (can be more than one).",
+    type=click.Choice(ALLOWED_INTEGRATIONS),
+    multiple=True,
+)
+
+option_postgres_version = click.option(
+    '-P', '--postgres-version', help="Version of Postgres.", type=click.Choice(ALLOWED_POSTGRES_VERSIONS)
+)
+
+option_mysql_version = click.option(
+    '-M', '--mysql-version', help="Version of MySQL.", type=click.Choice(ALLOWED_MYSQL_VERSIONS)
+)
+
+option_mssql_version = click.option(
+    '-S', '--mssql-version', help="Version of MsSQL.", type=click.Choice(ALLOWED_MSSQL_VERSIONS)
+)
+
+option_executor = click.option(
+    '--executor',
+    help='Executor to use in a kubernetes cluster. Default is KubernetesExecutor.',
+    type=click.Choice(ALLOWED_EXECUTORS),
+)
+
+option_forward_credentials = click.option(
+    '-f', '--forward-credentials', help="Forward local credentials to container when running.", is_flag=True
+)
+
+option_use_airflow_version = click.option(
+    '-a',
+    '--use-airflow-version',
+    help="Use (reinstall) specified Airflow version after entering the container.",
+    envvar='USE_AIRFLOW_VERSION',
+)
+
+option_mount_sources = click.option(
+    '--mount-sources',
+    type=click.Choice(ALLOWED_MOUNT_OPTIONS),
+    default=ALLOWED_MOUNT_OPTIONS[0],
+    help="Choose which local sources should be mounted (default = selected)",
+)
+
+option_force_build = click.option('--force-build', help="Force image build before running.", is_flag=True)
+
+option_db_reset = click.option(
+    '-d',
+    '--db-reset',
+    help="Resets DB when entering the container.",
+    is_flag=True,
+    envvar='DB_RESET',
+)
+
+
+@click.group(invoke_without_command=True, context_settings={'help_option_names': ['-h', '--help']})
+@option_verbose
+@option_dry_run
+@option_python
+@option_backend
+@option_postgres_version
+@option_mysql_version
+@option_mssql_version
+@option_forward_credentials
+@option_force_build
+@option_use_airflow_version
+@option_mount_sources
+@option_integration
+@option_db_reset
+@click.pass_context
+def main(ctx: Context, **kwargs):
+    if not ctx.invoked_subcommand:
+        ctx.forward(shell, extra_args={})
+
+
+option_docker_cache = click.option(
+    '-c',
+    '--docker-cache',
+    help='Cache option for image used during the build.',
+    type=click.Choice(ALLOWED_BUILD_CACHE),
 )
 
 option_github_repository = click.option(
@@ -106,10 +443,15 @@ option_github_image_id = click.option(
     Breeze can automatically pull the commit SHA id specified Default: latest',
 )
 
-option_image_tag = click.option('--image-tag', help='Additional tag in the image.')
+option_image_tag = click.option(
+    '-t', '--image-tag', help='Set tag for the image (additionally to default Airflow convention).'
+)
 
 option_platform = click.option(
-    '--platform', help='Builds image for the platform specified.', envvar='PLATFORM'
+    '--platform',
+    help='Builds image for the platform specified.',
+    envvar='PLATFORM',
+    type=click.Choice(ALLOWED_PLATFORMS),
 )
 
 option_debian_version = click.option(
@@ -120,8 +462,9 @@ option_debian_version = click.option(
     envvar='DEBIAN_VERSION',
 )
 option_upgrade_to_newer_dependencies = click.option(
+    "-u",
     '--upgrade-to-newer-dependencies',
-    help='Upgrades PIP packages to latest versions available without looking at the constraints.',
+    help='If set to anything else than false, upgrades PIP packages to latest versions available.',
     envvar='UPGRADE_TO_NEWER_DEPENDENCIES',
 )
 option_additional_extras = click.option(
@@ -184,11 +527,43 @@ option_runtime_apt_deps = click.option(
     help='The basic apt runtime dependencies to use when building the images.',
     envvar='RUNTIME_APT_DEPS',
 )
-option_ci_flag = click.option(
-    '--ci',
-    help='Enabling this option will off the pip progress bar',
+
+option_skip_rebuild_check = click.option(
+    '-r',
+    '--skip-rebuild-check',
+    help="Skips checking if rebuild is needed",
+    is_flag=True,
+    envvar='SKIP_REBUILD_CHECK',
+)
+
+option_prepare_buildx_cache = click.option(
+    '--prepare-buildx-cache',
+    help='Prepares build cache rather than build images locally.',
+    is_flag=True,
+    envvar='PREPARE_BUILDX_CACHE',
+)
+
+option_install_providers_from_sources = click.option(
+    '--install-providers-from-sources',
+    help="Install providers from sources when installing.",
+    is_flag=True,
+    envvar='INSTALL_PROVIDERS_FROM_SOURCES',
+)
+
+option_load_example_dags = click.option(
+    '-e',
+    '--load-example-dags',
+    help="Enable configuration to load example DAGs when starting Airflow.",
+    is_flag=True,
+    envvar='LOAD_EXAMPLES',
+)
+
+option_load_default_connection = click.option(
+    '-c',
+    '--load-default-connections',
+    help="Enable configuration to load default connections when starting Airflow.",
     is_flag=True,
-    envvar='CI',
+    envvar='LOAD_DEFAULT_CONNECTIONS',
 )
 
 
@@ -199,82 +574,136 @@ def version():
     console.print(f"\n[green]{NAME} version: {VERSION}[/]\n")
 
 
+# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+# Make sure that whatever you add here as an option is also
+# added in the "main" command above. The main command above
+# is used as a shorthand for shell and, except for the extra
+# args, it should have the same parameters.
+# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+@main.command()
 @option_verbose
-@main.command(
-    context_settings=dict(
-        ignore_unknown_options=True,
-        allow_extra_args=True,
-    ),
-)
-@option_python_version
+@option_dry_run
+@option_python
 @option_backend
-@click.option('--integration', type=click.Choice(ALLOWED_INTEGRATIONS), multiple=True)
-@click.option('-L', '--build-cache-local', is_flag=True)
-@click.option('-U', '--build-cache-pulled', is_flag=True)
-@click.option('-X', '--build-cache-disabled', is_flag=True)
-@click.option('--postgres-version', type=click.Choice(ALLOWED_POSTGRES_VERSIONS))
-@click.option('--mysql-version', type=click.Choice(ALLOWED_MYSQL_VERSIONS))
-@click.option('--mssql-version', type=click.Choice(ALLOWED_MSSQL_VERSIONS))
-@click.option(
-    '--executor',
-    type=click.Choice(ALLOWED_EXECUTORS),
-    help='Executor to use in a kubernetes cluster. Default is KubernetesExecutor',
-)
-@click.option('-f', '--forward-credentials', is_flag=True)
-@click.option('-l', '--skip-mounting-local-sources', is_flag=True)
-@click.option('--use-airflow-version', type=click.Choice(ALLOWED_INSTALL_AIRFLOW_VERSIONS))
-@click.option('--use-packages-from-dist', is_flag=True)
-@click.option('--force-build', is_flag=True)
+@option_postgres_version
+@option_mysql_version
+@option_mssql_version
+@option_forward_credentials
+@option_force_build
+@option_use_airflow_version
+@option_mount_sources
+@option_integration
+@option_db_reset
 @click.argument('extra-args', nargs=-1, type=click.UNPROCESSED)
 def shell(
     verbose: bool,
+    dry_run: bool,
     python: str,
     backend: str,
     integration: Tuple[str],
-    build_cache_local: bool,
-    build_cache_pulled: bool,
-    build_cache_disabled: bool,
     postgres_version: str,
     mysql_version: str,
     mssql_version: str,
-    executor: str,
     forward_credentials: bool,
-    skip_mounting_local_sources: bool,
+    mount_sources: str,
     use_airflow_version: str,
-    use_packages_from_dist: bool,
     force_build: bool,
+    db_reset: bool,
     extra_args: Tuple,
 ):
     """Enters breeze.py environment. this is the default command use when no other is selected."""
 
     if verbose:
         console.print("\n[green]Welcome to breeze.py[/]\n")
-        console.print(f"\n[green]Root of Airflow Sources = {__AIRFLOW_SOURCES_ROOT}[/]\n")
-    build_shell(
-        verbose,
-        python_version=python,
+        console.print(f"\n[green]Root of Airflow Sources = {AIRFLOW_SOURCES_ROOT}[/]\n")
+    enter_shell(
+        verbose=verbose,
+        dry_run=dry_run,
+        python=python,
+        backend=backend,
+        integration=integration,
+        postgres_version=postgres_version,
+        mysql_version=mysql_version,
+        mssql_version=mssql_version,
+        forward_credentials=str(forward_credentials),
+        mount_sources=mount_sources,
+        use_airflow_version=use_airflow_version,
+        force_build=force_build,
+        db_reset=db_reset,
+        extra_args=extra_args,
+    )
+
+
+@option_verbose
+@main.command(name='start-airflow')
+@option_dry_run
+@option_python
+@option_backend
+@option_postgres_version
+@option_load_example_dags
+@option_load_default_connection
+@option_mysql_version
+@option_mssql_version
+@option_forward_credentials
+@option_force_build
+@option_use_airflow_version
+@option_mount_sources
+@option_integration
+@option_db_reset
+@click.argument('extra-args', nargs=-1, type=click.UNPROCESSED)
+def start_airflow(
+    verbose: bool,
+    dry_run: bool,
+    python: str,
+    backend: str,
+    integration: Tuple[str],
+    postgres_version: str,
+    load_example_dags: bool,
+    load_default_connections: bool,
+    mysql_version: str,
+    mssql_version: str,
+    forward_credentials: bool,
+    mount_sources: str,
+    use_airflow_version: str,
+    force_build: bool,
+    db_reset: bool,
+    extra_args: Tuple,
+):
+    """Enters breeze.py environment and starts all Airflow components in the tmux session."""
+    enter_shell(
+        verbose=verbose,
+        dry_run=dry_run,
+        python=python,
         backend=backend,
         integration=integration,
-        build_cache_local=build_cache_local,
-        build_cache_disabled=build_cache_disabled,
-        build_cache_pulled=build_cache_pulled,
         postgres_version=postgres_version,
+        load_default_connections=load_default_connections,
+        load_example_dags=load_example_dags,
         mysql_version=mysql_version,
         mssql_version=mssql_version,
-        executor=executor,
         forward_credentials=str(forward_credentials),
-        skip_mounting_local_sources=skip_mounting_local_sources,
+        mount_sources=mount_sources,
         use_airflow_version=use_airflow_version,
-        use_packages_from_dist=use_packages_from_dist,
         force_build=force_build,
+        db_reset=db_reset,
+        start_airflow=True,
         extra_args=extra_args,
     )
 
 
+@main.command(name='build-image')
 @option_verbose
-@main.command(name='build-ci-image')
+@option_dry_run
+@option_python
+@option_upgrade_to_newer_dependencies
+@option_platform
+@option_debian_version
+@option_github_repository
+@option_docker_cache
+@option_image_tag
+@option_prepare_buildx_cache
+@option_install_providers_from_sources
 @option_additional_extras
-@option_python_version
 @option_additional_dev_apt_deps
 @option_additional_runtime_apt_deps
 @option_additional_python_deps
@@ -287,17 +716,12 @@ def shell(
 @option_dev_apt_deps
 @option_runtime_apt_command
 @option_runtime_apt_deps
-@option_github_repository
-@click.option('--build-cache', help='Cache option')
-@option_platform
-@option_debian_version
-@click.option('--prepare-buildx-cache', is_flag=True)
-@option_ci_flag
-@option_upgrade_to_newer_dependencies
 def build_ci_image(
     verbose: bool,
+    dry_run: bool,
     additional_extras: Optional[str],
     python: str,
+    image_tag: Optional[str],
     additional_dev_apt_deps: Optional[str],
     additional_runtime_apt_deps: Optional[str],
     additional_python_deps: Optional[str],
@@ -307,28 +731,29 @@ def build_ci_image(
     additional_runtime_apt_env: Optional[str],
     dev_apt_command: Optional[str],
     dev_apt_deps: Optional[str],
+    install_providers_from_sources: bool,
     runtime_apt_command: Optional[str],
     runtime_apt_deps: Optional[str],
     github_repository: Optional[str],
-    build_cache: Optional[str],
+    docker_cache: Optional[str],
     platform: Optional[str],
     debian_version: Optional[str],
     prepare_buildx_cache: bool,
-    ci: bool,
     upgrade_to_newer_dependencies: str = "false",
 ):
-    """Builds docker CI image without entering the container."""
+    """Builds docker CI image."""
 
     if verbose:
         console.print(
-            f"\n[blue]Building image of airflow from {__AIRFLOW_SOURCES_ROOT} "
+            f"\n[bright_blue]Building image of airflow from {AIRFLOW_SOURCES_ROOT} "
             f"python version: {python}[/]\n"
         )
-    create_directories()
     build_image(
-        verbose,
+        verbose=verbose,
+        dry_run=dry_run,
         additional_extras=additional_extras,
-        python_version=python,
+        python=python,
+        image_tag=image_tag,
         additional_dev_apt_deps=additional_dev_apt_deps,
         additional_runtime_apt_deps=additional_runtime_apt_deps,
         additional_python_deps=additional_python_deps,
@@ -336,42 +761,65 @@ def build_ci_image(
         additional_dev_apt_command=additional_dev_apt_command,
         additional_dev_apt_env=additional_dev_apt_env,
         additional_runtime_apt_env=additional_runtime_apt_env,
+        install_providers_from_sources=install_providers_from_sources,
         dev_apt_command=dev_apt_command,
         dev_apt_deps=dev_apt_deps,
         runtime_apt_command=runtime_apt_command,
         runtime_apt_deps=runtime_apt_deps,
         github_repository=github_repository,
-        docker_cache=build_cache,
+        docker_cache=docker_cache,
         platform=platform,
         debian_version=debian_version,
         prepare_buildx_cache=prepare_buildx_cache,
-        ci=ci,
         upgrade_to_newer_dependencies=upgrade_to_newer_dependencies,
     )
 
 
 @option_verbose
+@option_dry_run
 @main.command(name='build-prod-image')
+@option_python
+@option_upgrade_to_newer_dependencies
+@option_platform
+@option_debian_version
+@option_github_repository
+@option_docker_cache
+@option_image_tag
+@option_prepare_buildx_cache
+@click.option(
+    '--installation-method',
+    help="Whether to install airflow from sources ('.') or PyPI ('apache-airflow')",
+    type=click.Choice(ALLOWED_INSTALLATION_METHODS),
+)
+@option_install_providers_from_sources
+@click.option(
+    '--install-from-docker-context-files',
+    help='Install wheels from local docker-context-files when building image',
+    is_flag=True,
+)
 @click.option(
-    '--cleanup-docker-context-files', help='Preserves data volumes when stopping airflow.', is_flag=True
-)
-@click.option('--disable-mysql-client-installation', is_flag=True)
-@click.option('--disable-mssql-client-installation', is_flag=True)
-@click.option('--disable-postgres-client-installation', is_flag=True)
-@click.option('--disable-pip-cache', is_flag=True)
-@click.option('-t', '--install-airflow-reference')
-@click.option('-a', '--install-airflow-version', type=click.Choice(ALLOWED_INSTALL_AIRFLOW_VERSIONS))
-@click.option('-r', '--skip-rebuild-check', is_flag=True)
-@click.option('-L', '--build-cache-local', is_flag=True)
-@click.option('-U', '--build-cache-pulled', is_flag=True)
-@click.option('-X', '--build-cache-disabled', is_flag=True)
+    '--cleanup-docker-context-files',
+    help='Cleans up docker context files before running build.',
+    is_flag=True,
+)
+@click.option('--extras', help="Extras to install by default")
+@click.option('--disable-mysql-client-installation', help="Do not install MySQL client", is_flag=True)
+@click.option('--disable-mssql-client-installation', help="Do not install MsSQL client", is_flag=True)
+@click.option('--disable-postgres-client-installation', help="Do not install Postgres client", is_flag=True)
+@click.option(
+    '--disable-airflow-repo-cache', help="Disable cache from Airflow repository during building", is_flag=True
+)
+@click.option('--disable-pypi', help="Disable pypi during building", is_flag=True)
+@click.option(
+    '--install-airflow-reference',
+    help="Install airflow using specified reference (tag/branch) from GitHub",
+)
+@click.option('-a', '--install-airflow-version', help="Install specified version of airflow")
 @option_additional_extras
-@option_python_version
 @option_additional_dev_apt_deps
 @option_additional_runtime_apt_deps
 @option_additional_python_deps
 @option_additional_dev_apt_command
-@option_runtime_apt_command
 @option_additional_dev_apt_env
 @option_additional_runtime_apt_env
 @option_additional_runtime_apt_command
@@ -379,38 +827,21 @@ def build_ci_image(
 @option_dev_apt_deps
 @option_runtime_apt_command
 @option_runtime_apt_deps
-@option_github_repository
-@option_platform
-@option_debian_version
-@option_upgrade_to_newer_dependencies
-@click.option('--prepare-buildx-cache', is_flag=True)
-@click.option('--skip-installing-airflow-providers-from-sources', is_flag=True)
-@click.option('--disable-pypi-when-building', is_flag=True)
-@click.option('-E', '--extras')
-@click.option('--installation-method', type=click.Choice(ALLOWED_USE_AIRFLOW_VERSIONS))
-@click.option(
-    '--install-from-docker-context-files',
-    help='Install wheels from local docker-context-files when building image',
-    is_flag=True,
-)
-@option_image_tag
-@click.option('--github-token', envvar='GITHUB_TOKEN')
-@option_ci_flag
 def build_prod_image(
     verbose: bool,
+    dry_run: bool,
     cleanup_docker_context_files: bool,
     disable_mysql_client_installation: bool,
     disable_mssql_client_installation: bool,
     disable_postgres_client_installation: bool,
-    disable_pip_cache: bool,
+    disable_airflow_repo_cache: bool,
+    disable_pypi: bool,
     install_airflow_reference: Optional[str],
     install_airflow_version: Optional[str],
-    skip_rebuild_check: bool,
-    build_cache_local: bool,
-    build_cache_pulled: bool,
-    build_cache_disabled: bool,
+    docker_cache: str,
     additional_extras: Optional[str],
     python: str,
+    image_tag: Optional[str],
     additional_dev_apt_deps: Optional[str],
     additional_runtime_apt_deps: Optional[str],
     additional_python_deps: Optional[str],
@@ -426,37 +857,32 @@ def build_prod_image(
     platform: Optional[str],
     debian_version: Optional[str],
     prepare_buildx_cache: bool,
-    skip_installing_airflow_providers_from_sources: bool,
-    disable_pypi_when_building: bool,
+    install_providers_from_sources: bool,
     extras: Optional[str],
     installation_method: Optional[str],
     install_from_docker_context_files: bool,
-    image_tag: Optional[str],
-    github_token: Optional[str],
-    ci: bool,
     upgrade_to_newer_dependencies: str = "false",
 ):
-    """Builds docker Production image without entering the container."""
+    """Builds docker Production image."""
     if verbose:
-        console.print("\n[blue]Building image[/]\n")
+        console.print("\n[bright_blue]Building image[/]\n")
     if prepare_buildx_cache:
-        build_cache_pulled = True
+        docker_cache = "pulled"
         cleanup_docker_context_files = True
     build_production_image(
         verbose,
+        dry_run,
         cleanup_docker_context_files=cleanup_docker_context_files,
         disable_mysql_client_installation=disable_mysql_client_installation,
         disable_mssql_client_installation=disable_mssql_client_installation,
         disable_postgres_client_installation=disable_postgres_client_installation,
-        disable_pip_cache=disable_pip_cache,
+        disable_airflow_repo_cache=disable_airflow_repo_cache,
+        disable_pypi=disable_pypi,
         install_airflow_reference=install_airflow_reference,
         install_airflow_version=install_airflow_version,
-        skip_rebuild_check=skip_rebuild_check,
-        build_cache_local=build_cache_local,
-        build_cache_pulled=build_cache_pulled,
-        build_cache_disabled=build_cache_disabled,
+        docker_cache=docker_cache,
         additional_extras=additional_extras,
-        python_version=python,
+        python=python,
         additional_dev_apt_deps=additional_dev_apt_deps,
         additional_runtime_apt_deps=additional_runtime_apt_deps,
         additional_python_deps=additional_python_deps,
@@ -473,27 +899,14 @@ def build_prod_image(
         debian_version=debian_version,
         upgrade_to_newer_dependencies=upgrade_to_newer_dependencies,
         prepare_buildx_cache=prepare_buildx_cache,
-        skip_installing_airflow_providers_from_sources=skip_installing_airflow_providers_from_sources,
-        disable_pypi_when_building=disable_pypi_when_building,
+        install_providers_from_sources=install_providers_from_sources,
         extras=extras,
         installation_method=installation_method,
         install_docker_context_files=install_from_docker_context_files,
         image_tag=image_tag,
-        github_token=github_token,
-        ci=ci,
     )
 
 
-@option_verbose
-@main.command(name='start-airflow')
-def start_airflow(verbose: bool):
-    """Enters breeze.py environment and set up the tmux session"""
-    if verbose:
-        console.print("\n[green]Welcome to breeze.py[/]\n")
-    console.print(ASCIIART, style=ASCIIART_STYLE)
-    raise ClickException("\nPlease implement entering breeze.py\n")
-
-
 BREEZE_COMMENT = "Added by Updated Airflow Breeze autocomplete setup"
 START_LINE = f"# START: {BREEZE_COMMENT}\n"
 END_LINE = f"# END: {BREEZE_COMMENT}\n"
@@ -519,7 +932,7 @@ def backup(script_path_file: Path):
     shutil.copy(str(script_path_file), str(script_path_file) + ".bak")
 
 
-def write_to_shell(command_to_execute: str, script_path: str, force_setup: bool) -> bool:
+def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, force_setup: bool) -> bool:
     skip_check = False
     script_path_file = Path(script_path)
     if not script_path_file.exists():
@@ -528,7 +941,7 @@ def write_to_shell(command_to_execute: str, script_path: str, force_setup: bool)
         if BREEZE_COMMENT in script_path_file.read_text():
             if not force_setup:
                 console.print(
-                    "\n[yellow]Autocompletion is already setup. Skipping. "
+                    "\n[bright_yellow]Autocompletion is already setup. Skipping. "
                     "You can force autocomplete installation by adding --force-setup[/]\n"
                 )
                 return False
@@ -537,26 +950,36 @@ def write_to_shell(command_to_execute: str, script_path: str, force_setup: bool)
                 remove_autogenerated_code(script_path)
     console.print(f"\nModifying the {script_path} file!\n")
     console.print(f"\nCopy of the file is held in {script_path}.bak !\n")
-    backup(script_path_file)
+    if not dry_run:
+        backup(script_path_file)
     text = script_path_file.read_text()
-    script_path_file.write_text(
-        text + ("\n" if not text.endswith("\n") else "") + START_LINE + command_to_execute + "\n" + END_LINE
-    )
+    if not dry_run:
+        script_path_file.write_text(
+            text
+            + ("\n" if not text.endswith("\n") else "")
+            + START_LINE
+            + command_to_execute
+            + "\n"
+            + END_LINE
+        )
+    else:
+        console.print(f"[bright_blue]The autocomplete script would be added to {script_path}[/]")
     console.print(
-        "\n[yellow]Please exit and re-enter your shell or run:[/]\n\n" f"   `source {script_path}`\n"
+        f"\n[bright_yellow]Please exit and re-enter your shell or run:[/]\n\n   `source {script_path}`\n"
     )
     return True
 
 
 @option_verbose
+@option_dry_run
 @click.option(
     '-f',
     '--force-setup',
     is_flag=True,
-    help='Force autocomplete setup even if already setup before (overrides the setup).',
+    help='Force autocomplete setup even if already set up before (overrides the setup).',
 )
 @main.command(name='setup-autocomplete')
-def setup_autocomplete(verbose: bool, force_setup: bool):
+def setup_autocomplete(verbose: bool, dry_run: bool, force_setup: bool):
     """
     Enables autocompletion of Breeze2 commands.
     """
@@ -569,29 +992,28 @@ def setup_autocomplete(verbose: bool, force_setup: bool):
         sys.exit(1)
     console.print(f"Installing {detected_shell} completion for local user")
     autocomplete_path = (
-        Path(AIRFLOW_SOURCES_DIR) / "dev" / "breeze" / "autocomplete" / f"{NAME}-complete-{detected_shell}.sh"
+        AIRFLOW_SOURCES_ROOT / "dev" / "breeze" / "autocomplete" / f"{NAME}-complete-{detected_shell}.sh"
     )
     console.print(f"[bright_blue]Activation command script is available here: {autocomplete_path}[/]\n")
     console.print(
-        f"[yellow]We need to add above script to your {detected_shell} profile and "
+        f"[bright_yellow]We need to add above script to your {detected_shell} profile and "
         "install 'click' package in your default python installation destination.[/]\n"
     )
-    updated = False
     if click.confirm("Should we proceed ?"):
         if detected_shell == 'bash':
             script_path = str(Path('~').expanduser() / '.bash_completion')
             command_to_execute = f"source {autocomplete_path}"
-            updated = write_to_shell(command_to_execute, script_path, force_setup)
+            updated = write_to_shell(command_to_execute, dry_run, script_path, force_setup)
         elif detected_shell == 'zsh':
             script_path = str(Path('~').expanduser() / '.zshrc')
             command_to_execute = f"source {autocomplete_path}"
-            updated = write_to_shell(command_to_execute, script_path, force_setup)
+            updated = write_to_shell(command_to_execute, dry_run, script_path, force_setup)
         elif detected_shell == 'fish':
             # Include steps for fish shell
             script_path = str(Path('~').expanduser() / f'.config/fish/completions/{NAME}.fish')
             if os.path.exists(script_path) and not force_setup:
                 console.print(
-                    "\n[yellow]Autocompletion is already setup. Skipping. "
+                    "\n[bright_yellow]Autocompletion is already setup. Skipping. "
                     "You can force autocomplete installation by adding --force-setup[/]\n"
                 )
             else:
@@ -606,9 +1028,9 @@ def setup_autocomplete(verbose: bool, force_setup: bool):
                 subprocess.check_output(['powershell', '-NoProfile', 'echo $profile']).decode("utf-8").strip()
             )
             command_to_execute = f". {autocomplete_path}"
-            write_to_shell(command_to_execute, script_path, force_setup)
+            write_to_shell(command_to_execute, dry_run, script_path, force_setup)
         if updated:
-            run_command(['pip', 'install', '--upgrade', 'click'], verbose=True, check=False)
+            run_command(['pip', 'install', '--upgrade', 'click'], verbose=True, dry_run=dry_run, check=False)
     else:
         console.print(
             "\nPlease follow the https://click.palletsprojects.com/en/8.1.x/shell-completion/ "
@@ -617,23 +1039,23 @@ def setup_autocomplete(verbose: bool, force_setup: bool):
 
 
 @main.command(name='config')
-@option_python_version
+@option_python
 @option_backend
-@click.option('--cheatsheet/--no-cheatsheet', default=None)
-@click.option('--asciiart/--no-asciiart', default=None)
+@click.option('-C/-c', '--cheatsheet/--no-cheatsheet', help="Enable/disable cheatsheet", default=None)
+@click.option('-A/-a', '--asciiart/--no-asciiart', help="Enable/disable ASCII art", default=None)
 def change_config(python, backend, cheatsheet, asciiart):
     """
-    Toggles on/off cheatsheet, asciiart
+    Toggles the cheatsheet and ASCII art on/off; sets the default Python version and backend.
     """
     if asciiart:
-        console.print('[blue] ASCIIART enabled')
+        console.print('[bright_blue] ASCIIART enabled')
         delete_cache('suppress_asciiart')
     elif asciiart is not None:
         touch_cache_file('suppress_asciiart')
     else:
         pass
     if cheatsheet:
-        console.print('[blue] Cheatsheet enabled')
+        console.print('[bright_blue] Cheatsheet enabled')
         delete_cache('suppress_cheatsheet')
     elif cheatsheet is not None:
         touch_cache_file('suppress_cheatsheet')
@@ -641,47 +1063,97 @@ def change_config(python, backend, cheatsheet, asciiart):
         pass
     if python is not None:
         write_to_cache_file('PYTHON_MAJOR_MINOR_VERSION', python)
-        console.print(f'[blue]Python cached_value {python}')
+        console.print(f'[bright_blue]Python cached_value {python}')
     if backend is not None:
         write_to_cache_file('BACKEND', backend)
-        console.print(f'[blue]Backend cached_value {backend}')
+        console.print(f'[bright_blue]Backend cached_value {backend}')
+
+
+@dataclass
+class DocParams:
+    package_filter: Tuple[str, ...]
+    docs_only: bool
+    spellcheck_only: bool
+
+    @property
+    def args_doc_builder(self) -> List[str]:
+        doc_args = []
+        if self.docs_only:
+            doc_args.append("--docs-only")
+        if self.spellcheck_only:
+            doc_args.append("--spellcheck-only")
+        if self.package_filter:
+            for single_filter in self.package_filter:
+                doc_args.extend(["--package-filter", single_filter])
+        return doc_args
 
 
-@option_verbose
 @main.command(name='build-docs')
-@click.option('--docs-only', is_flag=True)
-@click.option('--spellcheck-only', is_flag=True)
-@click.option('--package-filter', type=click.Choice(get_available_packages()), multiple=True)
-def build_docs(verbose: bool, docs_only: bool, spellcheck_only: bool, package_filter: Tuple[str]):
+@option_verbose
+@option_dry_run
+@click.option('-d', '--docs-only', help="Only build documentation", is_flag=True)
+@click.option('-s', '--spellcheck-only', help="Only run spell checking", is_flag=True)
+@click.option(
+    '-p',
+    '--package-filter',
+    help="List of packages to consider",
+    type=click.Choice(get_available_packages()),
+    multiple=True,
+)
+def build_docs(
+    verbose: bool, dry_run: bool, docs_only: bool, spellcheck_only: bool, package_filter: Tuple[str, ...]
+):
     """
-    Builds documentation in the container
+    Builds documentation in the container.
+
+    * figures out CI image name
+    * checks if there are enough resources
+    * converts parameters into a DocParams class
     """
-    params = BuildParams()
-    airflow_sources = str(get_airflow_sources_root())
-    ci_image_name = params.airflow_ci_image_name
-    check_docker_resources(verbose, airflow_sources, ci_image_name)
-    doc_builder = DocBuilder(
-        package_filter=package_filter, docs_only=docs_only, spellcheck_only=spellcheck_only
+    params = BuildCiParams()
+    ci_image_name = params.airflow_image_name
+    check_docker_resources(verbose, ci_image_name)
+    doc_builder = DocParams(
+        package_filter=package_filter,
+        docs_only=docs_only,
+        spellcheck_only=spellcheck_only,
     )
-    build_documentation.build(verbose, airflow_sources, ci_image_name, doc_builder)
+    extra_docker_flags = get_extra_docker_flags(MOUNT_SELECTED)
+    cmd = []
+    cmd.extend(["docker", "run"])
+    cmd.extend(extra_docker_flags)
+    cmd.extend(["-t", "-e", "GITHUB_ACTIONS="])
+    cmd.extend(["--entrypoint", "/usr/local/bin/dumb-init", "--pull", "never"])
+    cmd.extend([ci_image_name, "--", "/opt/airflow/scripts/in_container/run_docs_build.sh"])
+    cmd.extend(doc_builder.args_doc_builder)
+    run_command(cmd, verbose=verbose, dry_run=dry_run, text=True)
 
 
-@option_verbose
 @main.command(
     name="static-check",
+    help="Run static checks.",
     context_settings=dict(
         ignore_unknown_options=True,
         allow_extra_args=True,
     ),
 )
-@click.option('--all-files', is_flag=True)
-@click.option('--show-diff-on-failure', is_flag=True)
-@click.option('--last-commit', is_flag=True)
-@click.option('-t', '--type', type=click.Choice(PRE_COMMIT_LIST), multiple=True)
-@click.option('--files', is_flag=True)
+@click.option(
+    '-t',
+    '--type',
+    help="Type(s) of the static checks to run",
+    type=click.Choice(PRE_COMMIT_LIST),
+    multiple=True,
+)
+@click.option('-a', '--all-files', help="Run checks on all files", is_flag=True)
+@click.option('-f', '--files', help="Run checks on the files passed as extra arguments", is_flag=True)
+@click.option('-s', '--show-diff-on-failure', help="Show diff for files modified by the checks", is_flag=True)
+@click.option('-c', '--last-commit', help="Run checks for all files in the last commit", is_flag=True)
+@option_verbose
+@option_dry_run
 @click.argument('precommit_args', nargs=-1, type=click.UNPROCESSED)
 def static_check(
     verbose: bool,
+    dry_run: bool,
     all_files: bool,
     show_diff_on_failure: bool,
     last_commit: bool,
@@ -689,7 +1161,7 @@ def static_check(
     files: bool,
     precommit_args: Tuple,
 ):
-    if check_package_installed('pre_commit'):
+    if check_pre_commit_installed(verbose=verbose):
         command_to_execute = ['pre-commit', 'run']
         for single_check in type:
             command_to_execute.append(single_check)
@@ -701,9 +1173,76 @@ def static_check(
             command_to_execute.extend(["--from-ref", "HEAD^", "--to-ref", "HEAD"])
         if files:
             command_to_execute.append("--files")
+        if verbose:
+            command_to_execute.append("--verbose")
         if precommit_args:
             command_to_execute.extend(precommit_args)
-        run_command(command_to_execute, suppress_raise_exception=True, suppress_console_print=True, text=True)
+        run_command(
+            command_to_execute,
+            verbose=verbose,
+            dry_run=dry_run,
+            check=False,
+            no_output_dump_on_exception=True,
+            text=True,
+        )
+
+
+@main.command(name="stop", help="Stops running breeze environment.")
+@option_verbose
+@option_dry_run
+@click.option(
+    "-p",
+    "--preserve-volumes",
+    help="By default the stop command removes volumes with data. " "Specifying the flag will preserve them.",
+    is_flag=True,
+)
+def stop(verbose: bool, dry_run: bool, preserve_volumes: bool):
+    command_to_execute = ['docker-compose', 'down', "--remove-orphans"]
+    if not preserve_volumes:
+        command_to_execute.append("--volumes")
+    shell_params = ShellParams({})
+    env_variables = construct_env_variables_docker_compose_command(shell_params)
+    run_command(command_to_execute, verbose=verbose, dry_run=dry_run, env=env_variables)
+
+
+@main.command(name="cleanup", help="Removes the cache of parameters, images and cleans up docker cache.")
+@option_verbose
+@option_dry_run
+def cleanup(verbose: bool, dry_run: bool):
+    console.print("\n[bright_yellow]Removing cache of parameters, images, and cleans up docker cache[/]")
+    if click.confirm("Are you sure?"):
+        docker_images_command_to_execute = [
+            'docker',
+            'images',
+            '--filter',
+            'label=org.apache.airflow.image',
+            '--format',
+            '{{.Repository}}:{{.Tag}}',
+        ]
+        process = run_command(
+            docker_images_command_to_execute, verbose=verbose, text=True, capture_output=True
+        )
+        images = process.stdout.splitlines() if process and process.stdout else []
+        if images:
+            console.print("[light_blue]Removing images:[/]")
+            for image in images:
+                console.print(f"[light_blue] * {image}[/]")
+            console.print()
+            docker_rmi_command_to_execute = [
+                'docker',
+                'rmi',
+                '--force',
+            ]
+            docker_rmi_command_to_execute.extend(images)
+            run_command(docker_rmi_command_to_execute, verbose=verbose, dry_run=dry_run, check=False)
+        else:
+            console.print("[light_blue]No images to remote[/]\n")
+        system_prune_command_to_execute = ['docker', 'system', 'prune']
+        console.print("Pruning docker images")
+        run_command(system_prune_command_to_execute, verbose=verbose, dry_run=dry_run, check=False)
+        console.print(f"Removing build cache dir ${BUILD_CACHE_DIR}")
+        if not dry_run:
+            shutil.rmtree(BUILD_CACHE_DIR, ignore_errors=True)
 
 
 if __name__ == '__main__':
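
For illustration, the shared `option_verbose` and `option_dry_run` decorators
applied to the commands above can be plain reusable click options. A minimal
sketch follows (the shortcut letters, envvar names and help texts here are
assumptions, not taken from this change):

    import click

    # Hypothetical reusable options, mirroring how breeze.py applies them:
    option_verbose = click.option(
        '-v', '--verbose', is_flag=True, envvar='VERBOSE',
        help='Print the commands executed by Breeze.',
    )
    option_dry_run = click.option(
        '-D', '--dry-run', is_flag=True, envvar='DRY_RUN',
        help="Only print 'write' commands instead of executing them.",
    )

    @click.command()
    @option_verbose
    @option_dry_run
    def demo(verbose: bool, dry_run: bool):
        click.echo(f'verbose={verbose}, dry_run={dry_run}')

Defining each option once and reusing it as a decorator keeps the flags, help
texts and environment variables consistent across all commands.
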
diff --git a/dev/breeze/src/airflow_breeze/docs_generator/__init__.py b/dev/breeze/src/airflow_breeze/build_image/__init__.py
similarity index 96%
rename from dev/breeze/src/airflow_breeze/docs_generator/__init__.py
rename to dev/breeze/src/airflow_breeze/build_image/__init__.py
index 13a83393a9..7b70ea1ee8 100644
--- a/dev/breeze/src/airflow_breeze/docs_generator/__init__.py
+++ b/dev/breeze/src/airflow_breeze/build_image/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Build CI and PROD images."""
diff --git a/dev/breeze/src/airflow_breeze/ci/__init__.py b/dev/breeze/src/airflow_breeze/build_image/ci/__init__.py
similarity index 96%
rename from dev/breeze/src/airflow_breeze/ci/__init__.py
rename to dev/breeze/src/airflow_breeze/build_image/ci/__init__.py
index 13a83393a9..7a633ffa0b 100644
--- a/dev/breeze/src/airflow_breeze/ci/__init__.py
+++ b/dev/breeze/src/airflow_breeze/build_image/ci/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Building CI image."""
diff --git a/dev/breeze/src/airflow_breeze/build_image/ci/build_ci_image.py b/dev/breeze/src/airflow_breeze/build_image/ci/build_ci_image.py
new file mode 100644
index 0000000000..e7805170c3
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/build_image/ci/build_ci_image.py
@@ -0,0 +1,122 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Command to build CI image."""
+from typing import Dict
+
+from airflow_breeze.build_image.ci.build_ci_params import BuildCiParams
+from airflow_breeze.utils.cache import synchronize_parameters_with_cache, touch_cache_file
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.docker_command_utils import construct_build_docker_command
+from airflow_breeze.utils.md5_build_check import calculate_md5_checksum_for_files
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, BUILD_CACHE_DIR
+from airflow_breeze.utils.registry import login_to_docker_registry
+from airflow_breeze.utils.run_utils import filter_out_none, fix_group_permissions, run_command
+
+REQUIRED_CI_IMAGE_ARGS = [
+    "python_base_image",
+    "airflow_version",
+    "airflow_branch",
+    "airflow_extras",
+    "airflow_pre_cached_pip_packages",
+    "additional_airflow_extras",
+    "additional_python_deps",
+    "additional_dev_apt_command",
+    "additional_dev_apt_deps",
+    "additional_dev_apt_env",
+    "additional_runtime_apt_command",
+    "additional_runtime_apt_deps",
+    "additional_runtime_apt_env",
+    "upgrade_to_newer_dependencies",
+    "constraints_github_repository",
+    "airflow_constraints_reference",
+    "airflow_constraints",
+    "airflow_image_repository",
+    "airflow_image_date_created",
+    "build_id",
+]
+
+OPTIONAL_CI_IMAGE_ARGS = [
+    "dev_apt_command",
+    "dev_apt_deps",
+    "runtime_apt_command",
+    "runtime_apt_deps",
+]
+
+
+def get_ci_image_build_params(parameters_passed: Dict[str, str]) -> BuildCiParams:
+    """
+    Converts parameters received as a dict into BuildCiParams. If a cacheable
+    parameter is missing, the last used value for that parameter is read from
+    the cache; if none is found, the default value for that parameter is used.
+
+    This method updates the cache based on the parameters passed via the dict.
+
+    :param parameters_passed: parameters to use when constructing BuildCiParams
+    """
+    ci_image_params = BuildCiParams(**parameters_passed)
+    synchronize_parameters_with_cache(ci_image_params, parameters_passed)
+    return ci_image_params
+
+
+def build_image(verbose: bool, dry_run: bool, **kwargs) -> None:
+    """
+    Builds CI image:
+
+      * fixes group permissions for files (to improve caching when umask is 002)
+      * converts all the parameters received via kwargs into BuildCiParams (including cached ones)
+      * prints info about the image to build
+      * logs in to the docker registry on CI if the buildx cache is being prepared
+      * removes the "tag" of the previously built image so that the inline cache uses only the remote image
+      * constructs the docker build command to run based on the parameters passed
+      * runs the build command
+      * updates cached information that the build completed and saves checksums of all files
+        for a quick future check of whether the build is needed
+
+    :param verbose: print commands when running
+    :param dry_run: do not execute "write" commands - just print what would happen
+    :param kwargs: arguments passed from the command
+    """
+    fix_group_permissions()
+    parameters_passed = filter_out_none(**kwargs)
+    ci_image_params = get_ci_image_build_params(parameters_passed)
+    ci_image_params.print_info()
+    run_command(
+        ["docker", "rmi", "--no-prune", "--force", ci_image_params.airflow_image_name],
+        verbose=verbose,
+        dry_run=dry_run,
+        cwd=AIRFLOW_SOURCES_ROOT,
+        text=True,
+        check=False,
+    )
+    cmd = construct_build_docker_command(
+        image_params=ci_image_params,
+        verbose=verbose,
+        required_args=REQUIRED_CI_IMAGE_ARGS,
+        optional_args=OPTIONAL_CI_IMAGE_ARGS,
+        production_image=False,
+    )
+    if ci_image_params.prepare_buildx_cache:
+        login_to_docker_registry(ci_image_params)
+    console.print(f"\n[blue]Building CI Image for Python {ci_image_params.python}\n")
+    run_command(cmd, verbose=verbose, dry_run=dry_run, cwd=AIRFLOW_SOURCES_ROOT, text=True)
+    if not dry_run:
+        ci_image_cache_dir = BUILD_CACHE_DIR / ci_image_params.airflow_branch
+        ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
+        touch_cache_file(f"built_{ci_image_params.python}", root_dir=ci_image_cache_dir)
+        calculate_md5_checksum_for_files(ci_image_params.md5sum_cache_dir, update=True)
+    else:
+        console.print("[blue]Not updating build cache because we are in `dry_run` mode.[/]")
diff --git a/dev/breeze/src/airflow_breeze/ci/build_params.py b/dev/breeze/src/airflow_breeze/build_image/ci/build_ci_params.py
similarity index 59%
rename from dev/breeze/src/airflow_breeze/ci/build_params.py
rename to dev/breeze/src/airflow_breeze/build_image/ci/build_ci_params.py
index 94e462695a..ac7901e4aa 100644
--- a/dev/breeze/src/airflow_breeze/ci/build_params.py
+++ b/dev/breeze/src/airflow_breeze/build_image/ci/build_ci_params.py
@@ -14,37 +14,36 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Parameters for Build CI Image."""
 import os
-import sys
 from dataclasses import dataclass
 from datetime import datetime
+from pathlib import Path
 from typing import List, Optional
 
 from airflow_breeze.branch_defaults import AIRFLOW_BRANCH, DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
-from airflow_breeze.console import console
 from airflow_breeze.global_constants import get_airflow_version
-from airflow_breeze.utils.docker_command_utils import check_if_buildx_plugin_available
-from airflow_breeze.utils.run_utils import run_command
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.path_utils import BUILD_CACHE_DIR
 
 
 @dataclass
-class BuildParams:
-    # To construct ci_image_name
+class BuildCiParams:
+    """
+    CI build parameters. These parameters determine the command issued to build the CI image.
+    """
+
     upgrade_to_newer_dependencies: str = "false"
-    python_version: str = "3.7"
+    python: str = "3.7"
     airflow_branch: str = AIRFLOW_BRANCH
     build_id: int = 0
-    # To construct docker cache ci directive
     docker_cache: str = "pulled"
     airflow_extras: str = "devel_ci"
+    install_providers_from_sources: bool = False
     additional_airflow_extras: str = ""
     additional_python_deps: str = ""
-    # To construct ci_image_name
-    tag: str = "latest"
-    # To construct airflow_image_repository
     github_repository: str = "apache/airflow"
     constraints_github_repository: str = "apache/airflow"
-    # Not sure if defaultConstraintsBranch and airflow_constraints_reference are different
     default_constraints_branch: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
     airflow_constraints: str = "constraints-source-providers"
     airflow_constraints_reference: Optional[str] = "constraints-main"
@@ -52,6 +51,7 @@ class BuildParams:
     airflow_pre_cached_pip_packages: str = "true"
     dev_apt_command: str = ""
     dev_apt_deps: str = ""
+    image_tag: str = ""
     additional_dev_apt_command: str = ""
     additional_dev_apt_deps: str = ""
     additional_dev_apt_env: str = ""
@@ -63,24 +63,24 @@ class BuildParams:
     platform: str = f"linux/{os.uname().machine}"
     debian_version: str = "bullseye"
     prepare_buildx_cache: bool = False
-    ci: bool = False
+    skip_rebuild_check: bool = False
 
     @property
-    def airflow_image_name(self):
+    def airflow_base_image_name(self):
         image = f'ghcr.io/{self.github_repository.lower()}'
         return image
 
     @property
-    def airflow_ci_image_name(self):
+    def airflow_image_name(self):
         """Construct CI image link"""
-        image = f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}'
+        image = f'{self.airflow_base_image_name}/{self.airflow_branch}/ci/python{self.python}'
         return image
 
     @property
     def airflow_ci_image_name_with_tag(self):
         """Construct CI image link"""
-        image = f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}'
-        return image if not self.tag else image + f":{self.tag}"
+        image = f'{self.airflow_base_image_name}/{self.airflow_branch}/ci/python{self.python}'
+        return image if not self.image_tag else image + f":{self.image_tag}"
 
     @property
     def airflow_image_repository(self):
@@ -89,86 +89,53 @@ class BuildParams:
     @property
     def python_base_image(self):
         """Construct Python Base Image"""
-        return f'python:{self.python_version}-slim-{self.debian_version}'
+        return f'python:{self.python}-slim-{self.debian_version}'
 
     @property
     def airflow_ci_local_manifest_image(self):
         """Construct CI Local Manifest Image"""
-        return f'local-airflow-ci-manifest/{self.airflow_branch}/python{self.python_version}'
+        return f'local-airflow-ci-manifest/{self.airflow_branch}/python{self.python}'
 
     @property
     def airflow_ci_remote_manifest_image(self):
         """Construct CI Remote Manifest Image"""
-        return f'{self.airflow_ci_image_name}/{self.airflow_branch}/ci-manifest//python:{self.python_version}'
+        return f'{self.airflow_image_name}/{self.airflow_branch}/ci-manifest/python:{self.python}'
 
     @property
     def airflow_image_date_created(self):
         now = datetime.now()
         return now.strftime("%Y-%m-%dT%H:%M:%SZ")
 
-    @property
-    def commit_sha(self):
-        output = run_command(['git', 'rev-parse', 'HEAD'], capture_output=True, text=True)
-        return output.stdout.strip()
-
     @property
     def airflow_version(self):
         return get_airflow_version()
 
-    def check_buildx_plugin_build_command(self):
-        build_command_param = []
-        is_buildx_available = check_if_buildx_plugin_available(True)
-        if is_buildx_available:
-            if self.prepare_buildx_cache:
-                build_command_param.extend(
-                    ["buildx", "build", "--builder", "airflow_cache", "--progress=tty"]
-                )
-                cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
-                output = run_command(cmd, verbose=True, text=True)
-                if output.returncode != 0:
-                    next_cmd = ['docker', 'buildx', 'create', '--name', 'airflow_cache']
-                    run_command(next_cmd, verbose=True, text=True)
-            else:
-                build_command_param.extend(["buildx", "build", "--builder", "default", "--progress=tty"])
-        else:
-            if self.prepare_buildx_cache:
-                console.print(
-                    '\n[red] Buildx cli plugin is not available and you need it to prepare buildx cache. \n'
-                )
-                console.print(
-                    '[red] Please install it following https://docs.docker.com/buildx/working-with-buildx/ \n'
-                )
-                sys.exit()
-            build_command_param.append("build")
-        return build_command_param
-
     @property
-    def docker_cache_ci_directive(self) -> List:
+    def docker_cache_ci_directive(self) -> List[str]:
         docker_cache_ci_directive = []
 
         if self.docker_cache == "pulled":
-            docker_cache_ci_directive.append(f"--cache-from={self.airflow_ci_image_name}")
+            docker_cache_ci_directive.append(f"--cache-from={self.airflow_image_name}")
         elif self.docker_cache == "disabled":
             docker_cache_ci_directive.append("--no-cache")
         else:
             docker_cache_ci_directive = []
-
         if self.prepare_buildx_cache:
             docker_cache_ci_directive.extend(["--cache-to=type=inline,mode=max", "--push"])
         return docker_cache_ci_directive
 
     @property
-    def extra_docker_ci_flags(self) -> List[str]:
+    def extra_docker_build_flags(self) -> List[str]:
         extra_ci_flags = []
-        if self.ci:
-            extra_ci_flags.extend(
-                [
-                    "--build-arg",
-                    "PIP_PROGRESS_BAR=off",
-                ]
-            )
         if self.airflow_constraints_location is not None and len(self.airflow_constraints_location) > 0:
             extra_ci_flags.extend(
                 ["--build-arg", f"AIRFLOW_CONSTRAINTS_LOCATION={self.airflow_constraints_location}"]
             )
         return extra_ci_flags
+
+    @property
+    def md5sum_cache_dir(self) -> Path:
+        return Path(BUILD_CACHE_DIR, self.airflow_branch, self.python, "CI")
+
+    def print_info(self):
+        console.print(f"CI Image: {self.airflow_version} Python: {self.python}.")
diff --git a/dev/breeze/src/airflow_breeze/prod/__init__.py b/dev/breeze/src/airflow_breeze/build_image/prod/__init__.py
similarity index 96%
rename from dev/breeze/src/airflow_breeze/prod/__init__.py
rename to dev/breeze/src/airflow_breeze/build_image/prod/__init__.py
index 13a83393a9..141dd81f21 100644
--- a/dev/breeze/src/airflow_breeze/prod/__init__.py
+++ b/dev/breeze/src/airflow_breeze/build_image/prod/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Building PROD Image."""
diff --git a/dev/breeze/src/airflow_breeze/build_image/prod/build_prod_image.py b/dev/breeze/src/airflow_breeze/build_image/prod/build_prod_image.py
new file mode 100644
index 0000000000..2974d00d46
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/build_image/prod/build_prod_image.py
@@ -0,0 +1,173 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Command to build PROD image."""
+import contextlib
+import sys
+from typing import Dict
+
+from airflow_breeze.build_image.prod.build_prod_params import BuildProdParams
+from airflow_breeze.utils.cache import synchronize_parameters_with_cache
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.docker_command_utils import construct_build_docker_command
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, DOCKER_CONTEXT_DIR
+from airflow_breeze.utils.registry import login_to_docker_registry
+from airflow_breeze.utils.run_utils import filter_out_none, fix_group_permissions, run_command
+
+REQUIRED_PROD_IMAGE_ARGS = [
+    "python_base_image",
+    "install_mysql_client",
+    "install_mssql_client",
+    "install_postgres_client",
+    "airflow_version",
+    "airflow_branch",
+    "airflow_extras",
+    "airflow_pre_cached_pip_packages",
+    "docker_context_files",
+    "additional_airflow_extras",
+    "additional_python_deps",
+    "additional_dev_apt_command",
+    "additional_dev_apt_deps",
+    "additional_dev_apt_env",
+    "additional_runtime_apt_command",
+    "additional_runtime_apt_deps",
+    "additional_runtime_apt_env",
+    "upgrade_to_newer_dependencies",
+    "constraints_github_repository",
+    "airflow_constraints",
+    "airflow_image_repository",
+    "airflow_image_date_created",
+    "build_id",
+    "airflow_image_readme_url",
+    "install_providers_from_sources",
+    "install_from_pypi",
+    "install_from_docker_context_files",
+]
+
+OPTIONAL_PROD_IMAGE_ARGS = [
+    "dev_apt_command",
+    "dev_apt_deps",
+    "runtime_apt_command",
+    "runtime_apt_deps",
+]
+
+
+def clean_docker_context_files():
+    """
+    Cleans up the docker-context-files folder, leaving only README.md there.
+    """
+    with contextlib.suppress(FileNotFoundError):
+        context_files_to_delete = DOCKER_CONTEXT_DIR.glob('**/*')
+        for file_to_delete in context_files_to_delete:
+            if file_to_delete.name != 'README.md':
+                file_to_delete.unlink()
+
+
+def check_docker_context_files(install_from_docker_context_files: bool):
+    """
+    Sanity check: when installing from docker-context-files, we expect some packages there;
+    otherwise the folder should contain no packages, as stray files might invalidate the Docker cache.
+
+    This method exits with an error if what we see is unexpected for the given operation.
+
+    :param install_from_docker_context_files: whether we want to install from docker-context-files
+    """
+    context_file = DOCKER_CONTEXT_DIR.glob('**/*')
+    number_of_context_files = len(
+        [context for context in context_file if context.is_file() and context.name != 'README.md']
+    )
+    if number_of_context_files == 0:
+        if install_from_docker_context_files:
+            console.print('[bright_yellow]\nERROR! You want to install packages from docker-context-files')
+            console.print('[bright_yellow]\n but there are no packages to install in this folder.')
+            sys.exit(1)
+    else:
+        if not install_from_docker_context_files:
+            console.print(
+                '[bright_yellow]\n ERROR! There are some extra files in docker-context-files except README.md'
+            )
+            console.print('[bright_yellow]\nAnd you did not choose --install-from-docker-context-files flag')
+            console.print(
+                '[bright_yellow]\nThis might result in unnecessary cache invalidation and long build times'
+            )
+            console.print(
+                '[bright_yellow]\nExiting now - please restart the command '
+                'with --cleanup-docker-context-files switch'
+            )
+            sys.exit(1)
+
+
+def get_prod_image_build_params(parameters_passed: Dict[str, str]) -> BuildProdParams:
+    """
+    Converts parameters received as a dict into BuildProdParams. If a cacheable
+    parameter is missing, the last used value for that parameter is read from
+    the cache; if none is found, the default value for that parameter is used.
+
+    This method updates the cache based on the parameters passed via the dict.
+
+    :param parameters_passed: parameters to use when constructing BuildProdParams
+    """
+    prod_image_params = BuildProdParams(**parameters_passed)
+    synchronize_parameters_with_cache(prod_image_params, parameters_passed)
+    return prod_image_params
+
+
+def build_production_image(verbose: bool, dry_run: bool, **kwargs):
+    """
+    Builds PROD image:
+
+      * fixes group permissions for files (to improve caching when umask is 002)
+      * converts all the parameters received via kwargs into BuildProdParams (including cached ones)
+      * prints info about the image to build
+      * removes docker-context-files if requested
+      * performs a sanity check on whether the expected files are present in docker-context-files
+      * logs in to the docker registry on CI if the buildx cache is being prepared
+      * removes the "tag" of the previously built image so that the inline cache uses only the remote image
+      * constructs the docker build command to run based on the parameters passed
+      * runs the build command
+
+    :param verbose: print commands when running
+    :param dry_run: do not execute "write" commands - just print what would happen
+    :param kwargs: arguments passed from the command
+    """
+    fix_group_permissions()
+    parameters_passed = filter_out_none(**kwargs)
+    prod_image_params = get_prod_image_build_params(parameters_passed)
+    prod_image_params.print_info()
+    if prod_image_params.cleanup_docker_context_files:
+        clean_docker_context_files()
+    check_docker_context_files(prod_image_params.install_docker_context_files)
+    if prod_image_params.prepare_buildx_cache:
+        login_to_docker_registry(prod_image_params)
+    run_command(
+        ["docker", "rmi", "--no-prune", "--force", prod_image_params.airflow_image_name],
+        verbose=verbose,
+        dry_run=dry_run,
+        cwd=AIRFLOW_SOURCES_ROOT,
+        text=True,
+        check=False,
+    )
+    console.print(f"\n[blue]Building PROD Image for Python {prod_image_params.python}\n")
+    cmd = construct_build_docker_command(
+        image_params=prod_image_params,
+        verbose=verbose,
+        required_args=REQUIRED_PROD_IMAGE_ARGS,
+        optional_args=OPTIONAL_PROD_IMAGE_ARGS,
+        production_image=True,
+    )
+    run_command(cmd, verbose=verbose, dry_run=dry_run, cwd=AIRFLOW_SOURCES_ROOT, text=True)
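
The docker-context-files sanity check is symmetric in both directions; a
short sketch of the expected behaviour (assuming the folder contains only
README.md to start with):

    from airflow_breeze.build_image.prod.build_prod_image import (
        check_docker_context_files,
        clean_docker_context_files,
    )

    clean_docker_context_files()       # leaves only README.md in the folder
    check_docker_context_files(False)  # passes - no packages and none expected
    check_docker_context_files(True)   # exits(1) - packages expected but missing
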
diff --git a/dev/breeze/src/airflow_breeze/prod/prod_params.py b/dev/breeze/src/airflow_breeze/build_image/prod/build_prod_params.py
similarity index 68%
rename from dev/breeze/src/airflow_breeze/prod/prod_params.py
rename to dev/breeze/src/airflow_breeze/build_image/prod/build_prod_params.py
index 9e5155c615..74667be5f0 100644
--- a/dev/breeze/src/airflow_breeze/prod/prod_params.py
+++ b/dev/breeze/src/airflow_breeze/build_image/prod/build_prod_params.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Parameters to build PROD image."""
 import os
 import re
 import sys
@@ -22,7 +23,6 @@ from datetime import datetime
 from typing import List
 
 from airflow_breeze.branch_defaults import AIRFLOW_BRANCH, DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
-from airflow_breeze.console import console
 from airflow_breeze.global_constants import (
     AIRFLOW_SOURCES_FROM,
     AIRFLOW_SOURCES_TO,
@@ -31,33 +31,33 @@ from airflow_breeze.global_constants import (
     get_airflow_extras,
     get_airflow_version,
 )
-from airflow_breeze.utils.docker_command_utils import check_if_buildx_plugin_available
-from airflow_breeze.utils.run_utils import is_multi_platform, run_command
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.run_utils import commit_sha
 
 
 @dataclass
-class ProdParams:
-    build_cache_local: bool
-    build_cache_pulled: bool
-    build_cache_disabled: bool
-    skip_rebuild_check: bool
-    disable_mysql_client_installation: bool
-    disable_mssql_client_installation: bool
-    disable_postgres_client_installation: bool
-    install_docker_context_files: bool
-    disable_pypi_when_building: bool
-    disable_pip_cache: bool
-    skip_installing_airflow_providers_from_sources: bool
-    cleanup_docker_context_files: bool
-    prepare_buildx_cache: bool
+class BuildProdParams:
+    """
+    PROD build parameters. These parameters determine the command issued to build the PROD image.
+    """
+
+    docker_cache: str
+    disable_mysql_client_installation: bool = False
+    disable_mssql_client_installation: bool = False
+    disable_postgres_client_installation: bool = False
+    install_docker_context_files: bool = False
+    disable_airflow_repo_cache: bool = False
+    install_providers_from_sources: bool = True
+    cleanup_docker_context_files: bool = False
+    prepare_buildx_cache: bool = False
+    disable_pypi: bool = False
     upgrade_to_newer_dependencies: str = "false"
     airflow_version: str = get_airflow_version()
-    python_version: str = "3.7"
+    python: str = "3.7"
     airflow_branch_for_pypi_preloading: str = AIRFLOW_BRANCH
     install_airflow_reference: str = ""
     install_airflow_version: str = ""
     default_constraints_branch = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
-    ci: bool = False
     build_id: int = 0
     airflow_constraints: str = "constraints-source-providers"
     github_repository: str = "apache/airflow"
@@ -89,14 +89,14 @@ class ProdParams:
         return self.airflow_branch_for_pypi_preloading
 
     @property
-    def airflow_image_name(self):
+    def airflow_base_image_name(self):
         image = f'ghcr.io/{self.github_repository.lower()}'
         return image
 
     @property
-    def airflow_prod_image_name(self):
+    def airflow_image_name(self):
         """Construct PROD image link"""
-        image = f'{self.airflow_image_name}/{self.airflow_branch}/prod/python{self.python_version}'
+        image = f'{self.airflow_base_image_name}/{self.airflow_branch}/prod/python{self.python}'
         return image
 
     @property
@@ -104,11 +104,6 @@ class ProdParams:
         the_image_type = 'PROD'
         return the_image_type
 
-    @property
-    def image_description(self) -> str:
-        image_description = 'Airflow production'
-        return image_description
-
     @property
     def args_for_remote_install(self) -> List:
         build_args = []
@@ -124,8 +119,6 @@ class ProdParams:
                 "AIRFLOW_SOURCES_TO=/empty",
             ]
         )
-        if self.ci:
-            build_args.extend(["--build-arg", "PIP_PROGRESS_BAR=off"])
         if len(self.airflow_constraints_reference) > 0:
             build_args.extend(
                 ["--build-arg", f"AIRFLOW_CONSTRAINTS_REFERENCE={self.airflow_constraints_reference}"]
@@ -210,21 +203,11 @@ class ProdParams:
         return extra_build_flags
 
     @property
-    def docker_cache(self) -> str:
-        if self.build_cache_local:
-            docker_cache = "local"
-        elif self.build_cache_disabled:
-            docker_cache = "disabled"
-        else:
-            docker_cache = "pulled"
-        return docker_cache
-
-    @property
-    def docker_cache_prod_directive(self) -> List:
+    def docker_cache_prod_directive(self) -> List[str]:
         docker_cache_prod_directive = []
 
         if self.docker_cache == "pulled":
-            docker_cache_prod_directive.append(f"--cache-from={self.airflow_prod_image_name}")
+            docker_cache_prod_directive.append(f"--cache-from={self.airflow_image_name}")
         elif self.docker_cache == "disabled":
             docker_cache_prod_directive.append("--no-cache")
         else:
@@ -232,49 +215,13 @@ class ProdParams:
 
         if self.prepare_buildx_cache:
             docker_cache_prod_directive.extend(["--cache-to=type=inline,mode=max", "--push"])
-            if is_multi_platform(self.platform):
-                console.print("\nSkip loading docker image on multi-platform build")
-            else:
-                docker_cache_prod_directive.extend(["--load"])
         return docker_cache_prod_directive
 
-    def check_buildx_plugin_build_command(self):
-        build_command_param = []
-        is_buildx_available = check_if_buildx_plugin_available(True)
-        if is_buildx_available:
-            if self.prepare_buildx_cache:
-                build_command_param.extend(
-                    ["buildx", "build", "--builder", "airflow_cache", "--progress=tty"]
-                )
-                cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
-                output = run_command(cmd, verbose=True, text=True)
-                if output.returncode != 0:
-                    next_cmd = ['docker', 'buildx', 'create', '--name', 'airflow_cache']
-                    run_command(next_cmd, verbose=True, text=True)
-            else:
-                build_command_param.extend(["buildx", "build", "--builder", "default", "--progress=tty"])
-        else:
-            if self.prepare_buildx_cache:
-                console.print(
-                    '\n[red] Buildx cli plugin is not available and you need it to prepare buildx cache. \n'
-                )
-                console.print(
-                    '[red] Please install it following https://docs.docker.com/buildx/working-with-buildx/ \n'
-                )
-                sys.exit()
-            build_command_param.append("build")
-        return build_command_param
-
     @property
     def python_base_image(self):
         """Construct Python Base Image"""
         #  ghcr.io/apache/airflow/main/python:3.8-slim-bullseye
-        return f'python:{self.python_version}-slim-{self.debian_version}'
-
-    @property
-    def commit_sha(self):
-        output = run_command(['git', 'rev-parse', 'HEAD'], capture_output=True, text=True)
-        return output.stdout.strip()
+        return f'python:{self.python}-slim-{self.debian_version}'
 
     @property
     def airflow_image_repository(self):
@@ -287,41 +234,22 @@ class ProdParams:
 
     @property
     def airflow_image_readme_url(self):
-        return (
-            f"https://raw.githubusercontent.com/apache/airflow/{self.commit_sha}/docs/docker-stack/README.md"
-        )
+        return f"https://raw.githubusercontent.com/apache/airflow/{commit_sha()}/docs/docker-stack/README.md"
 
     def print_info(self):
-        console.print(
-            f"Airflow {self.airflow_version} Python: {self.python_version}.\
-                 Image description: {self.image_description}"
-        )
-
-    @property
-    def skip_building_prod_image(self) -> bool:
-        skip_build = False
-        if self.skip_rebuild_check:
-            skip_build = True
-        return skip_build
-
-    @property
-    def check_image_for_rebuild(self) -> bool:
-        check_image = True
-        if self.skip_rebuild_check:
-            check_image = False
-        return check_image
+        console.print(f"CI Image: {self.airflow_version} Python: {self.python}.")
 
     @property
     def install_from_pypi(self) -> str:
         install_from_pypi = 'true'
-        if self.disable_pypi_when_building:
+        if self.disable_pypi:
             install_from_pypi = 'false'
         return install_from_pypi
 
     @property
     def airflow_pre_cached_pip_packages(self) -> str:
         airflow_pre_cached_pip = 'true'
-        if self.disable_pypi_when_building or self.disable_pip_cache:
+        if self.disable_pypi or self.disable_airflow_repo_cache:
             airflow_pre_cached_pip = 'false'
         return airflow_pre_cached_pip
 
@@ -346,17 +274,6 @@ class ProdParams:
             install_postgres = 'false'
         return install_postgres
 
-    @property
-    def install_providers_from_sources(self) -> str:
-        install_providers_source = 'true'
-        if (
-            self.skip_installing_airflow_providers_from_sources
-            or len(self.install_airflow_reference) > 0
-            or len(self.install_airflow_version) > 0
-        ):
-            install_providers_source = 'false'
-        return install_providers_source
-
     @property
     def install_from_docker_context_files(self) -> str:
         install_from_docker_context_files = 'false'
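
Since the three build_cache_* booleans were folded into a single docker_cache
value, the cache directive now follows directly from it. A minimal sketch
(assuming AIRFLOW_BRANCH == 'main' and the BuildProdParams defaults above):

    from airflow_breeze.build_image.prod.build_prod_params import BuildProdParams

    params = BuildProdParams(docker_cache='disabled')
    print(params.docker_cache_prod_directive)
    # ['--no-cache']
    print(BuildProdParams(docker_cache='pulled').airflow_image_name)
    # ghcr.io/apache/airflow/main/prod/python3.7
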
diff --git a/dev/breeze/src/airflow_breeze/cache.py b/dev/breeze/src/airflow_breeze/cache.py
deleted file mode 100644
index c9ae31ee27..0000000000
--- a/dev/breeze/src/airflow_breeze/cache.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import sys
-from pathlib import Path
-from typing import Any, List, Optional, Tuple
-
-from airflow_breeze import global_constants
-from airflow_breeze.console import console
-from airflow_breeze.utils.path_utils import BUILD_CACHE_DIR
-
-
-def check_if_cache_exists(param_name: str) -> bool:
-    return (Path(BUILD_CACHE_DIR) / f".{param_name}").exists()
-
-
-def read_from_cache_file(param_name: str) -> Optional[str]:
-    cache_exists = check_if_cache_exists(param_name)
-    if cache_exists:
-        return (Path(BUILD_CACHE_DIR) / f".{param_name}").read_text().strip()
-    else:
-        return None
-
-
-def touch_cache_file(param_name: str, root_dir: Path = BUILD_CACHE_DIR):
-    (Path(root_dir) / f".{param_name}").touch()
-
-
-def write_to_cache_file(param_name: str, param_value: str, check_allowed_values: bool = True) -> None:
-    allowed = False
-    allowed_values = None
-    if check_allowed_values:
-        allowed, allowed_values = check_if_values_allowed(param_name, param_value)
-    if allowed or not check_allowed_values:
-        print('BUILD CACHE DIR:', BUILD_CACHE_DIR)
-        cache_file = Path(BUILD_CACHE_DIR, f".{param_name}").open("w+")
-        cache_file.write(param_value)
-    else:
-        console.print(f'[cyan]You have sent the {param_value} for {param_name}')
-        console.print(f'[cyan]Allowed value for the {param_name} are {allowed_values}')
-        console.print('[cyan]Provide one of the supported params. Write to cache dir failed')
-        sys.exit()
-
-
-def check_cache_and_write_if_not_cached(
-    param_name: str, default_param_value: str
-) -> Tuple[bool, Optional[str]]:
-    is_cached = False
-    cached_value = read_from_cache_file(param_name)
-    if cached_value is None:
-        write_to_cache_file(param_name, default_param_value)
-        cached_value = default_param_value
-    else:
-        allowed, allowed_values = check_if_values_allowed(param_name, cached_value)
-        if allowed:
-            is_cached = True
-        else:
-            write_to_cache_file(param_name, default_param_value)
-            cached_value = default_param_value
-    return is_cached, cached_value
-
-
-def check_if_values_allowed(param_name: str, param_value: str) -> Tuple[bool, List[Any]]:
-    allowed = False
-    allowed_values: List[Any] = []
-    allowed_values = getattr(global_constants, f'ALLOWED_{param_name.upper()}S')
-    if param_value in allowed_values:
-        allowed = True
-    return allowed, allowed_values
-
-
-def delete_cache(param_name: str) -> bool:
-    deleted = False
-    if check_if_cache_exists(param_name):
-        (Path(BUILD_CACHE_DIR) / f".{param_name}").unlink()
-        deleted = True
-    return deleted
-
-
-def update_md5checksum_in_cache(file_content: str, cache_file_name: Path) -> bool:
-    modified = False
-    if cache_file_name.exists():
-        old_md5_checksum_content = Path(cache_file_name).read_text()
-        if old_md5_checksum_content.strip() != file_content.strip():
-            Path(cache_file_name).write_text(file_content)
-            modified = True
-    else:
-        Path(cache_file_name).write_text(file_content)
-        modified = True
-    return modified
-
-
-def write_env_in_cache(env_variables) -> Path:
-    shell_path = Path(BUILD_CACHE_DIR, "shell_command.env")
-    with open(shell_path, 'w') as shell_env_file:
-        for env_variable in env_variables:
-            shell_env_file.write(env_variable + '\n')
-    return shell_path
diff --git a/dev/breeze/src/airflow_breeze/ci/build_image.py b/dev/breeze/src/airflow_breeze/ci/build_image.py
deleted file mode 100644
index 818cdf29ec..0000000000
--- a/dev/breeze/src/airflow_breeze/ci/build_image.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from pathlib import Path
-from typing import Dict, List
-
-from airflow_breeze.cache import check_cache_and_write_if_not_cached, touch_cache_file, write_to_cache_file
-from airflow_breeze.ci.build_params import BuildParams
-from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE, BUILD_CACHE_DIR
-from airflow_breeze.utils.run_utils import filter_out_none, fix_group_permissions, run_command
-
-PARAMS_CI_IMAGE = [
-    "python_base_image",
-    "airflow_version",
-    "airflow_branch",
-    "airflow_extras",
-    "airflow_pre_cached_pip_packages",
-    "additional_airflow_extras",
-    "additional_python_deps",
-    "additional_dev_apt_command",
-    "additional_dev_apt_deps",
-    "additional_dev_apt_env",
-    "additional_runtime_apt_command",
-    "additional_runtime_apt_deps",
-    "additional_runtime_apt_env",
-    "upgrade_to_newer_dependencies",
-    "constraints_github_repository",
-    "airflow_constraints_reference",
-    "airflow_constraints",
-    "airflow_image_repository",
-    "airflow_image_date_created",
-    "build_id",
-    "commit_sha",
-]
-
-PARAMS_TO_VERIFY_CI_IMAGE = [
-    "dev_apt_command",
-    "dev_apt_deps",
-    "runtime_apt_command",
-    "runtime_apt_deps",
-]
-
-
-def construct_arguments_docker_command(ci_image: BuildParams) -> List[str]:
-    args_command = []
-    for param in PARAMS_CI_IMAGE:
-        args_command.append("--build-arg")
-        args_command.append(param.upper() + "=" + str(getattr(ci_image, param)))
-    for verify_param in PARAMS_TO_VERIFY_CI_IMAGE:
-        param_value = str(getattr(ci_image, verify_param))
-        if len(param_value) > 0:
-            args_command.append("--build-arg")
-            args_command.append(verify_param.upper() + "=" + param_value)
-    docker_cache = ci_image.docker_cache_ci_directive
-    if len(docker_cache) > 0:
-        args_command.extend(ci_image.docker_cache_ci_directive)
-    return args_command
-
-
-def construct_docker_command(ci_image: BuildParams) -> List[str]:
-    arguments = construct_arguments_docker_command(ci_image)
-    build_command = ci_image.check_buildx_plugin_build_command()
-    build_flags = ci_image.extra_docker_ci_flags
-    final_command = []
-    final_command.extend(["docker"])
-    final_command.extend(build_command)
-    final_command.extend(build_flags)
-    final_command.extend(["--pull"])
-    final_command.extend(arguments)
-    final_command.extend(["-t", ci_image.airflow_ci_image_name, "--target", "main", "."])
-    final_command.extend(["-f", 'Dockerfile.ci'])
-    final_command.extend(["--platform", ci_image.platform])
-    return final_command
-
-
-def build_image(verbose, **kwargs):
-    fix_group_permissions()
-    parameters_passed = filter_out_none(**kwargs)
-    ci_image_params = get_image_build_params(parameters_passed)
-    ci_image_cache_dir = Path(BUILD_CACHE_DIR, ci_image_params.airflow_branch)
-    ci_image_cache_dir.mkdir(parents=True, exist_ok=True)
-    touch_cache_file(
-        f"built_{ci_image_params.python_version}",
-        root_dir=ci_image_cache_dir,
-    )
-    run_command(
-        ["docker", "rmi", "--no-prune", "--force", ci_image_params.airflow_ci_image_name],
-        verbose=verbose,
-        cwd=AIRFLOW_SOURCE,
-        text=True,
-        suppress_raise_exception=True,
-    )
-    cmd = construct_docker_command(ci_image_params)
-    run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
-
-
-def get_image_build_params(parameters_passed: Dict[str, str]):
-    cacheable_parameters = {"python_version": 'PYTHON_MAJOR_MINOR_VERSION'}
-    ci_image_params = BuildParams(**parameters_passed)
-    for parameter, cache_key in cacheable_parameters.items():
-        value_from_parameter = parameters_passed.get(parameter)
-        if value_from_parameter:
-            write_to_cache_file(cache_key, value_from_parameter, check_allowed_values=True)
-            setattr(ci_image_params, parameter, value_from_parameter)
-        else:
-            is_cached, value = check_cache_and_write_if_not_cached(
-                cache_key, getattr(ci_image_params, parameter)
-            )
-            if is_cached:
-                setattr(ci_image_params, parameter, value)
-    return ci_image_params
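
The deleted construct_arguments_docker_command above turns a params object into repeated
"--build-arg NAME=value" pairs; per the file list, equivalent logic moves to the new
build_image/ci/build_ci_image.py. A minimal, runnable sketch of the pattern (the
SimpleNamespace stands in for BuildParams and is purely illustrative):

    from types import SimpleNamespace
    from typing import List

    def build_args(params, names: List[str]) -> List[str]:
        """Turn selected attributes of a params object into docker --build-arg pairs."""
        args: List[str] = []
        for name in names:
            # Each attribute name becomes an upper-cased docker build argument.
            args.extend(["--build-arg", f"{name.upper()}={getattr(params, name)}"])
        return args

    params = SimpleNamespace(airflow_version="2.3.0.dev0", airflow_branch="main")
    print(build_args(params, ["airflow_version", "airflow_branch"]))
    # ['--build-arg', 'AIRFLOW_VERSION=2.3.0.dev0', '--build-arg', 'AIRFLOW_BRANCH=main']
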
diff --git a/dev/breeze/src/airflow_breeze/console.py b/dev/breeze/src/airflow_breeze/console.py
deleted file mode 100644
index 175fb7014e..0000000000
--- a/dev/breeze/src/airflow_breeze/console.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from rich.console import Console
-from rich.theme import Theme
-
-custom_theme = Theme({"info": "blue", "warning": "magenta", "error": "red"})
-console = Console(force_terminal=True, color_system="standard", width=180, theme=custom_theme)
diff --git a/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py b/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py
deleted file mode 100644
index 91f6f469b6..0000000000
--- a/dev/breeze/src/airflow_breeze/docs_generator/build_documentation.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from airflow_breeze.docs_generator.doc_builder import DocBuilder
-from airflow_breeze.global_constants import MOUNT_ALL_LOCAL_SOURCES, MOUNT_SELECTED_LOCAL_SOURCES
-from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags
-from airflow_breeze.utils.run_utils import run_command
-
-
-def build(
-    verbose: bool,
-    airflow_sources: str,
-    airflow_ci_image_name: str,
-    doc_builder: DocBuilder,
-):
-    extra_docker_flags = get_extra_docker_flags(
-        MOUNT_ALL_LOCAL_SOURCES, MOUNT_SELECTED_LOCAL_SOURCES, airflow_sources
-    )
-    cmd = []
-    cmd.extend(["docker", "run"])
-    cmd.extend(extra_docker_flags)
-    cmd.extend(["-t", "-e", "GITHUB_ACTIONS="])
-    cmd.extend(["--entrypoint", "/usr/local/bin/dumb-init", "--pull", "never"])
-    cmd.extend([airflow_ci_image_name, "--", "/opt/airflow/scripts/in_container/run_docs_build.sh"])
-    cmd.extend(doc_builder.args_doc_builder)
-    run_command(cmd, verbose=verbose, text=True)
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index da048049b1..013a73a21d 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -14,11 +14,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""
+Global constants that are used by all other Breeze components.
+"""
 import os
-from pathlib import Path
 from typing import List
 
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
 
 # Commented this out as we are using buildkit and this vars became irrelevant
 # FORCE_PULL_IMAGES = False
@@ -34,88 +36,6 @@ DEFAULT_BACKEND = 'sqlite'
 # Checked before putting in build cache
 ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ['3.7', '3.8', '3.9', '3.10']
 ALLOWED_BACKENDS = ['sqlite', 'mysql', 'postgres', 'mssql']
-ALLOWED_STATIC_CHECKS = [
-    "all",
-    "airflow-config-yaml",
-    "airflow-providers-available",
-    "airflow-provider-yaml-files-ok",
-    "base-operator",
-    "black",
-    "blacken-docs",
-    "boring-cyborg",
-    "build-providers-dependencies",
-    "chart-schema-lint",
-    "capitalized-breeze",
-    "changelog-duplicates",
-    "check-apache-license",
-    "check-builtin-literals",
-    "check-executables-have-shebangs",
-    "check-extras-order",
-    "check-hooks-apply",
-    "check-integrations",
-    "check-merge-conflict",
-    "check-xml",
-    "check-system-tests",
-    "daysago-import-check",
-    "debug-statements",
-    "detect-private-key",
-    "doctoc",
-    "dont-use-safe-filter",
-    "end-of-file-fixer",
-    "fix-encoding-pragma",
-    "flake8",
-    "flynt",
-    "codespell",
-    "forbid-tabs",
-    "helm-lint",
-    "identity",
-    "incorrect-use-of-LoggingMixin",
-    "insert-license",
-    "isort",
-    "json-schema",
-    "language-matters",
-    "lint-dockerfile",
-    "lint-openapi",
-    "markdownlint",
-    "mermaid",
-    "mixed-line-ending",
-    "mypy",
-    "mypy-helm",
-    "no-providers-in-core-examples",
-    "no-relative-imports",
-    "pre-commit-descriptions",
-    "pre-commit-hook-names",
-    "pretty-format-json",
-    "provide-create-sessions",
-    "providers-changelogs",
-    "providers-init-file",
-    "providers-subpackages-init-file",
-    "provider-yamls",
-    "pydevd",
-    "pydocstyle",
-    "python-no-log-warn",
-    "pyupgrade",
-    "restrict-start_date",
-    "rst-backticks",
-    "setup-order",
-    "setup-extra-packages",
-    "shellcheck",
-    "sort-in-the-wild",
-    "sort-spelling-wordlist",
-    "stylelint",
-    "trailing-whitespace",
-    "ui-lint",
-    "update-breeze-file",
-    "update-extras",
-    "update-local-yml-file",
-    "update-setup-cfg-file",
-    "update-versions",
-    "verify-db-migrations-documented",
-    "version-sync",
-    "www-lint",
-    "yamllint",
-    "yesqa",
-]
 ALLOWED_INTEGRATIONS = [
     'cassandra',
     'kerberos',
@@ -134,8 +54,13 @@ ALLOWED_KIND_VERSIONS = ['v0.12.0']
 ALLOWED_HELM_VERSIONS = ['v3.6.3']
 ALLOWED_EXECUTORS = ['KubernetesExecutor', 'CeleryExecutor', 'LocalExecutor', 'CeleryKubernetesExecutor']
 ALLOWED_KIND_OPERATIONS = ['start', 'stop', 'restart', 'status', 'deploy', 'test', 'shell', 'k9s']
-ALLOWED_INSTALL_AIRFLOW_VERSIONS = ['2.0.2', '2.0.1', '2.0.0', 'wheel', 'sdist']
 ALLOWED_GENERATE_CONSTRAINTS_MODES = ['source-providers', 'pypi-providers', 'no-providers']
+
+MOUNT_SELECTED = "selected"
+MOUNT_ALL = "all"
+MOUNT_NONE = "none"
+
+ALLOWED_MOUNT_OPTIONS = [MOUNT_SELECTED, MOUNT_ALL, MOUNT_NONE]
 ALLOWED_POSTGRES_VERSIONS = ['10', '11', '12', '13']
 ALLOWED_MYSQL_VERSIONS = ['5.7', '8']
 ALLOWED_MSSQL_VERSIONS = ['2017-latest', '2019-latest']
@@ -155,8 +80,10 @@ ALLOWED_TEST_TYPES = [
     'Quarantined',
 ]
 ALLOWED_PACKAGE_FORMATS = ['both', 'sdist', 'wheel']
-ALLOWED_USE_AIRFLOW_VERSIONS = ['.', 'apache-airflow']
+ALLOWED_INSTALLATION_METHODS = ['.', 'apache-airflow']
 ALLOWED_DEBIAN_VERSIONS = ['buster', 'bullseye']
+ALLOWED_BUILD_CACHE = ["pulled", "local", "disabled"]
+ALLOWED_PLATFORMS = ["linux/amd64", "linux/arm64", "linux/amd64,linux/arm64"]
 
 PARAM_NAME_DESCRIPTION = {
     "BACKEND": "backend",
@@ -193,7 +120,7 @@ EXCLUDE_DOCS_PACKAGE_FOLDER = [
 
 
 def get_available_packages() -> List[str]:
-    docs_path_content = Path(get_airflow_sources_root(), 'docs').glob('*/')
+    docs_path_content = (AIRFLOW_SOURCES_ROOT / 'docs').glob('*/')
     available_packages = [x.name for x in docs_path_content if x.is_dir()]
     return list(set(available_packages) - set(EXCLUDE_DOCS_PACKAGE_FOLDER))
 
@@ -235,7 +162,7 @@ SQLITE_URL = "sqlite:////root/airflow/airflow.db"
 
 
 def get_airflow_version():
-    airflow_setup_file = Path(get_airflow_sources_root()) / 'setup.py'
+    airflow_setup_file = AIRFLOW_SOURCES_ROOT / 'setup.py'
     with open(airflow_setup_file) as setup_file:
         for line in setup_file.readlines():
             if "version =" in line:
@@ -243,7 +170,7 @@ def get_airflow_version():
 
 
 def get_airflow_extras():
-    airflow_dockerfile = Path(get_airflow_sources_root()) / 'Dockerfile'
+    airflow_dockerfile = AIRFLOW_SOURCES_ROOT / 'Dockerfile'
     with open(airflow_dockerfile) as dockerfile:
         for line in dockerfile.readlines():
             if "ARG AIRFLOW_EXTRAS=" in line:
@@ -284,13 +211,8 @@ FILES_FOR_REBUILD_CHECK = [
     'airflow/ui/yarn.lock',
 ]
 
-# Initialize mount variables
-MOUNT_SELECTED_LOCAL_SOURCES = True
-MOUNT_ALL_LOCAL_SOURCES = False
-
 ENABLED_SYSTEMS = ""
 
-
 CURRENT_KUBERNETES_MODES = ['image']
 CURRENT_KUBERNETES_VERSIONS = ['v1.23.4', 'v1.22.7', 'v1.21.10', 'v1.20.15']
 CURRENT_KIND_VERSIONS = ['v0.12.0']
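
The ALLOWED_* lists above are not just documentation: later in this commit, utils/cache.py
resolves them by the ALLOWED_<PARAM>S naming convention to validate cached values. A
self-contained sketch of that convention, with a module-level lookup standing in for the
getattr call on global_constants:

    import sys

    ALLOWED_MOUNT_OPTIONS = ["selected", "all", "none"]

    def check_if_value_allowed(param_name: str, param_value: str) -> None:
        # Mirror of the ALLOWED_{PARAM}S convention used by utils/cache.py.
        allowed_values = globals()[f'ALLOWED_{param_name.upper()}S']
        if param_value not in allowed_values:
            print(f'{param_value!r} is not a valid {param_name}; choose one of {allowed_values}')
            sys.exit(1)

    check_if_value_allowed("mount_option", "selected")  # passes silently
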
diff --git a/dev/breeze/src/airflow_breeze/prod/build_prod_image.py b/dev/breeze/src/airflow_breeze/prod/build_prod_image.py
deleted file mode 100644
index 1573b547c4..0000000000
--- a/dev/breeze/src/airflow_breeze/prod/build_prod_image.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import contextlib
-import sys
-from typing import Dict, List
-
-from airflow_breeze.cache import check_cache_and_write_if_not_cached, write_to_cache_file
-from airflow_breeze.console import console
-from airflow_breeze.prod.prod_params import ProdParams
-from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE, DOCKER_CONTEXT_DIR
-from airflow_breeze.utils.run_utils import filter_out_none, run_command
-
-PARAMS_PROD_IMAGE = [
-    "python_base_image",
-    "install_mysql_client",
-    "install_mssql_client",
-    "install_postgres_client",
-    "airflow_version",
-    "airflow_branch",
-    "airflow_extras",
-    "airflow_pre_cached_pip_packages",
-    "docker_context_files",
-    "additional_airflow_extras",
-    "additional_python_deps",
-    "additional_dev_apt_command",
-    "additional_dev_apt_deps",
-    "additional_dev_apt_env",
-    "additional_runtime_apt_command",
-    "additional_runtime_apt_deps",
-    "additional_runtime_apt_env",
-    "upgrade_to_newer_dependencies",
-    "constraints_github_repository",
-    "airflow_constraints",
-    "airflow_image_repository",
-    "airflow_image_date_created",
-    "build_id",
-    "commit_sha",
-    "airflow_image_readme_url",
-    "install_providers_from_sources",
-    "install_from_pypi",
-    "install_from_docker_context_files",
-]
-
-PARAMS_TO_VERIFY_PROD_IMAGE = [
-    "dev_apt_command",
-    "dev_apt_deps",
-    "runtime_apt_command",
-    "runtime_apt_deps",
-]
-
-
-def construct_arguments_docker_command(prod_image: ProdParams) -> List[str]:
-    args_command = []
-    for param in PARAMS_PROD_IMAGE:
-        args_command.append("--build-arg")
-        args_command.append(param.upper() + "=" + str(getattr(prod_image, param)))
-    for verify_param in PARAMS_TO_VERIFY_PROD_IMAGE:
-        param_value = str(getattr(prod_image, verify_param))
-        if len(param_value) > 0:
-            args_command.append("--build-arg")
-            args_command.append(verify_param.upper() + "=" + param_value)
-    docker_cache = prod_image.docker_cache_prod_directive
-    if len(docker_cache) > 0:
-        args_command.extend(prod_image.docker_cache_prod_directive)
-    return args_command
-
-
-def construct_docker_command(prod_image: ProdParams) -> List[str]:
-    arguments = construct_arguments_docker_command(prod_image)
-    build_command = prod_image.check_buildx_plugin_build_command()
-    build_flags = prod_image.extra_docker_build_flags
-    final_command = []
-    final_command.extend(["docker"])
-    final_command.extend(build_command)
-    final_command.extend(build_flags)
-    final_command.extend(["--pull"])
-    final_command.extend(arguments)
-    final_command.extend(["-t", prod_image.airflow_prod_image_name, "--target", "main", "."])
-    final_command.extend(["-f", 'Dockerfile'])
-    final_command.extend(["--platform", prod_image.platform])
-    return final_command
-
-
-def login_to_docker_registry(build_params: ProdParams):
-    if build_params.ci == "true":
-        if len(build_params.github_token) == 0:
-            console.print("\n[blue]Skip logging in to GitHub Registry. No Token available!")
-        elif build_params.airflow_login_to_github_registry != "true":
-            console.print(
-                "\n[blue]Skip logging in to GitHub Registry.\
-                    AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is set as false"
-            )
-        elif len(build_params.github_token) > 0:
-            run_command(['docker', 'logout', 'ghcr.io'], verbose=True, text=True)
-            run_command(
-                [
-                    'docker',
-                    'login',
-                    '--username',
-                    build_params.github_username,
-                    '--password-stdin',
-                    'ghcr.io',
-                ],
-                verbose=True,
-                text=True,
-                input=build_params.github_token,
-            )
-        else:
-            console.print('\n[blue]Skip Login to GitHub Container Registry as token is missing')
-
-
-def clean_docker_context_files():
-    with contextlib.suppress(FileNotFoundError):
-        context_files_to_delete = DOCKER_CONTEXT_DIR.glob('**/*')
-        for file_to_delete in context_files_to_delete:
-            if file_to_delete.name != 'README.md':
-                file_to_delete.unlink()
-
-
-def check_docker_context_files(install_from_docker_context_files: bool):
-    context_file = DOCKER_CONTEXT_DIR.glob('**/*')
-    number_of_context_files = len(
-        [context for context in context_file if context.is_file() and context.name != 'README.md']
-    )
-    if number_of_context_files == 0:
-        if install_from_docker_context_files:
-            console.print('[bright_yellow]\nERROR! You want to install packages from docker-context-files')
-            console.print('[bright_yellow]\n but there are no packages to install in this folder.')
-            sys.exit()
-    else:
-        if not install_from_docker_context_files:
-            console.print(
-                '[bright_yellow]\n ERROR! There are some extra files in docker-context-files except README.md'
-            )
-            console.print('[bright_yellow]\nAnd you did not choose --install-from-docker-context-files flag')
-            console.print(
-                '[bright_yellow]\nThis might result in unnecessary cache invalidation and long build times'
-            )
-            console.print(
-                '[bright_yellow]\nExiting now \
-                    - please restart the command with --cleanup-docker-context-files switch'
-            )
-            sys.exit()
-
-
-def build_production_image(verbose, **kwargs):
-    parameters_passed = filter_out_none(**kwargs)
-    prod_params = get_image_build_params(parameters_passed)
-    prod_params.print_info()
-    if prod_params.cleanup_docker_context_files:
-        clean_docker_context_files()
-    check_docker_context_files(prod_params.install_docker_context_files)
-    if prod_params.skip_building_prod_image:
-        console.print('[bright_yellow]\nSkip building production image. Assume the one we have is good!')
-        console.print('bright_yellow]\nYou must run Breeze2 build-prod-image before for all python versions!')
-    if prod_params.prepare_buildx_cache:
-        login_to_docker_registry(prod_params)
-
-    cmd = construct_docker_command(prod_params)
-    run_command(
-        ["docker", "rmi", "--no-prune", "--force", prod_params.airflow_prod_image_name],
-        verbose=verbose,
-        cwd=AIRFLOW_SOURCE,
-        text=True,
-        suppress_raise_exception=True,
-    )
-    run_command(cmd, verbose=verbose, cwd=AIRFLOW_SOURCE, text=True)
-    if prod_params.prepare_buildx_cache:
-        run_command(['docker', 'push', prod_params.airflow_prod_image_name], verbose=True, text=True)
-
-
-def get_image_build_params(parameters_passed: Dict[str, str]):
-    cacheable_parameters = {"python_version": 'PYTHON_MAJOR_MINOR_VERSION'}
-    prod_image_params = ProdParams(**parameters_passed)
-    for parameter, cache_key in cacheable_parameters.items():
-        value_from_parameter = parameters_passed.get(parameter)
-        if value_from_parameter:
-            write_to_cache_file(cache_key, value_from_parameter, check_allowed_values=True)
-            setattr(prod_image_params, parameter, value_from_parameter)
-        else:
-            is_cached, value = check_cache_and_write_if_not_cached(
-                cache_key, getattr(prod_image_params, parameter)
-            )
-            if is_cached:
-                setattr(prod_image_params, parameter, value)
-    return prod_image_params
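
The deleted login_to_docker_registry above passes the token via --password-stdin so the
secret never shows up in the process list or shell history. A minimal sketch of that
subprocess pattern; the environment variable names are illustrative only:

    import os
    import subprocess

    def docker_login(registry: str, username: str, token: str) -> None:
        # The token is delivered on stdin (input=...), never on the command line.
        subprocess.run(
            ["docker", "login", "--username", username, "--password-stdin", registry],
            input=token,
            text=True,
            check=True,
        )

    # docker_login("ghcr.io", os.environ["GITHUB_USERNAME"], os.environ["GITHUB_TOKEN"])
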
diff --git a/dev/breeze/src/airflow_breeze/shell/__init__.py b/dev/breeze/src/airflow_breeze/shell/__init__.py
index 13a83393a9..209c3a09f9 100644
--- a/dev/breeze/src/airflow_breeze/shell/__init__.py
+++ b/dev/breeze/src/airflow_breeze/shell/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Entering Shell"""
diff --git a/dev/breeze/src/airflow_breeze/shell/enter_shell.py b/dev/breeze/src/airflow_breeze/shell/enter_shell.py
index 783ca6348d..4fc710ebad 100644
--- a/dev/breeze/src/airflow_breeze/shell/enter_shell.py
+++ b/dev/breeze/src/airflow_breeze/shell/enter_shell.py
@@ -14,225 +14,194 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Command to enter container shell for Breeze."""
 import sys
 from pathlib import Path
 from typing import Dict
 
 import click
-from inputimeout import TimeoutOccurred, inputimeout
 
 from airflow_breeze import global_constants
-from airflow_breeze.cache import (
-    check_cache_and_write_if_not_cached,
+from airflow_breeze.build_image.ci.build_ci_image import build_image
+from airflow_breeze.shell.shell_params import ShellParams
+from airflow_breeze.utils.cache import (
+    check_cached_value_is_allowed,
     read_from_cache_file,
     write_to_cache_file,
 )
-from airflow_breeze.ci.build_image import build_image
-from airflow_breeze.console import console
-from airflow_breeze.global_constants import (
-    FLOWER_HOST_PORT,
-    MSSQL_HOST_PORT,
-    MSSQL_VERSION,
-    MYSQL_HOST_PORT,
-    MYSQL_VERSION,
-    POSTGRES_HOST_PORT,
-    POSTGRES_VERSION,
-    REDIS_HOST_PORT,
-    SSH_PORT,
-    WEBSERVER_HOST_PORT,
-)
-from airflow_breeze.shell.shell_builder import ShellBuilder
+from airflow_breeze.utils.console import console
 from airflow_breeze.utils.docker_command_utils import (
+    SOURCE_OF_DEFAULT_VALUES_FOR_VARIABLES,
+    VARIABLES_IN_CACHE,
     check_docker_compose_version,
     check_docker_resources,
     check_docker_version,
+    construct_env_variables_docker_compose_command,
 )
+from airflow_breeze.utils.md5_build_check import md5sum_check_if_build_is_needed
 from airflow_breeze.utils.path_utils import BUILD_CACHE_DIR
-from airflow_breeze.utils.run_utils import (
-    filter_out_none,
-    get_latest_sha,
-    instruct_build_image,
-    instruct_for_setup,
-    is_repo_rebased,
-    md5sum_check_if_build_is_needed,
-    run_command,
-)
-from airflow_breeze.visuals import ASCIIART, ASCIIART_STYLE, CHEATSHEET, CHEATSHEET_STYLE
-
-PARAMS_TO_ENTER_SHELL = {
-    "HOST_USER_ID": "host_user_id",
-    "HOST_GROUP_ID": "host_group_id",
-    "COMPOSE_FILE": "compose_files",
-    "PYTHON_MAJOR_MINOR_VERSION": "python_version",
-    "BACKEND": "backend",
-    "AIRFLOW_VERSION": "airflow_version",
-    "INSTALL_AIRFLOW_VERSION": "install_airflow_version",
-    "AIRFLOW_SOURCES": "airflow_sources",
-    "AIRFLOW_CI_IMAGE": "airflow_ci_image_name",
-    "AIRFLOW_CI_IMAGE_WITH_TAG": "airflow_ci_image_name_with_tag",
-    "AIRFLOW_PROD_IMAGE": "airflow_prod_image_name",
-    "AIRFLOW_IMAGE_KUBERNETES": "airflow_image_kubernetes",
-    "SQLITE_URL": "sqlite_url",
-    "USE_AIRFLOW_VERSION": "use_airflow_version",
-    "SKIP_TWINE_CHECK": "skip_twine_check",
-    "USE_PACKAGES_FROM_DIST": "use_packages_from_dist",
-    "EXECUTOR": "executor",
-    "START_AIRFLOW": "start_airflow",
-    "ENABLED_INTEGRATIONS": "enabled_integrations",
-    "GITHUB_ACTIONS": "github_actions",
-    "ISSUE_ID": "issue_id",
-    "NUM_RUNS": "num_runs",
-    "VERSION_SUFFIX_FOR_SVN": "version_suffix_for_svn",
-    "VERSION_SUFFIX_FOR_PYPI": "version_suffix_for_pypi",
-}
-
-PARAMS_FOR_SHELL_CONSTANTS = {
-    "SSH_PORT": SSH_PORT,
-    "WEBSERVER_HOST_PORT": WEBSERVER_HOST_PORT,
-    "FLOWER_HOST_PORT": FLOWER_HOST_PORT,
-    "REDIS_HOST_PORT": REDIS_HOST_PORT,
-    "MYSQL_HOST_PORT": MYSQL_HOST_PORT,
-    "MYSQL_VERSION": MYSQL_VERSION,
-    "MSSQL_HOST_PORT": MSSQL_HOST_PORT,
-    "MSSQL_VERSION": MSSQL_VERSION,
-    "POSTGRES_HOST_PORT": POSTGRES_HOST_PORT,
-    "POSTGRES_VERSION": POSTGRES_VERSION,
-}
-
-PARAMS_IN_CACHE = {
-    'python_version': 'PYTHON_MAJOR_MINOR_VERSION',
-    'backend': 'BACKEND',
-    'executor': 'EXECUTOR',
-    'postgres_version': 'POSTGRES_VERSION',
-    'mysql_version': 'MYSQL_VERSION',
-    'mssql_version': 'MSSQL_VERSION',
-}
-
-DEFAULT_VALUES_FOR_PARAM = {
-    'python_version': 'DEFAULT_PYTHON_MAJOR_MINOR_VERSION',
-    'backend': 'DEFAULT_BACKEND',
-    'executor': 'DEFAULT_EXECUTOR',
-    'postgres_version': 'POSTGRES_VERSION',
-    'mysql_version': 'MYSQL_VERSION',
-    'mssql_version': 'MSSQL_VERSION',
-}
-
-
-def construct_env_variables_docker_compose_command(shell_params: ShellBuilder) -> Dict[str, str]:
-    env_variables: Dict[str, str] = {}
-    for param_name in PARAMS_TO_ENTER_SHELL:
-        param_value = PARAMS_TO_ENTER_SHELL[param_name]
-        env_variables[param_name] = str(getattr(shell_params, param_value))
-    for constant_param_name in PARAMS_FOR_SHELL_CONSTANTS:
-        constant_param_value = PARAMS_FOR_SHELL_CONSTANTS[constant_param_name]
-        env_variables[constant_param_name] = str(constant_param_value)
-    return env_variables
-
-
-def build_image_if_needed_steps(verbose: bool, shell_params: ShellBuilder):
+from airflow_breeze.utils.run_utils import filter_out_none, instruct_build_image, is_repo_rebased, run_command
+from airflow_breeze.utils.visuals import ASCIIART, ASCIIART_STYLE, CHEATSHEET, CHEATSHEET_STYLE
+
+
+def build_image_if_needed_steps(verbose: bool, dry_run: bool, shell_params: ShellParams) -> None:
+    """
+    Check if an image build is needed based on which files have been modified since the last build.
+
+    * If a build is needed, the user is asked for confirmation
+    * If the branch is not rebased, it warns the user to rebase (to make sure the latest remote cache is useful)
+    * Builds the image / skips / quits depending on the answer
+
+    :param verbose: print commands when running
+    :param dry_run: do not execute "write" commands - just print what would happen
+    :param shell_params: parameters for the build
+    """
+    # We import those locally so that click autocomplete works
+    from inputimeout import TimeoutOccurred, inputimeout
+
     build_needed = md5sum_check_if_build_is_needed(shell_params.md5sum_cache_dir, shell_params.the_image_type)
-    if build_needed:
-        try:
-            user_status = inputimeout(
-                prompt='\nDo you want to build image?Press y/n/q in 5 seconds\n',
-                timeout=5,
-            )
-            if user_status == 'y':
-                latest_sha = get_latest_sha(shell_params.github_repository, shell_params.airflow_branch)
-                if is_repo_rebased(latest_sha):
+    if not build_needed:
+        return
+    try:
+        user_status = inputimeout(
+            prompt='\nDo you want to build the image? Press y/n/q in 5 seconds\n',
+            timeout=5,
+        )
+        if user_status in ['y', 'yes', 'Y', 'Yes', 'YES']:
+            if is_repo_rebased(shell_params.github_repository, shell_params.airflow_branch):
+                build_image(
+                    verbose,
+                    dry_run=dry_run,
+                    python=shell_params.python,
+                    upgrade_to_newer_dependencies="false",
+                )
+            else:
+                console.print(
+                    "\n[bright_yellow]This might take a lot of time, "
+                    "we think you should rebase first.[/]\n"
+                )
+                if click.confirm("But if you really, really want - you can do it"):
                     build_image(
-                        verbose,
-                        python_version=shell_params.python_version,
+                        verbose=verbose,
+                        dry_run=dry_run,
+                        python=shell_params.python,
                         upgrade_to_newer_dependencies="false",
                     )
                 else:
-                    if click.confirm(
-                        "\nThis might take a lot of time, we think you should rebase first. \
-                            But if you really, really want - you can do it\n"
-                    ):
-                        build_image(
-                            verbose,
-                            python_version=shell_params.python_version,
-                            upgrade_to_newer_dependencies="false",
-                        )
-                    else:
-                        console.print(
-                            '\nPlease rebase your code before continuing.\
-                                Check this link to know more \
-                                     https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#id15\n'
-                        )
-                        console.print('Exiting the process')
-                        sys.exit()
-            elif user_status == 'n':
-                instruct_build_image(shell_params.the_image_type, shell_params.python_version)
-            elif user_status == 'q':
-                console.print('\nQuitting the process')
-                sys.exit()
-            else:
-                console.print('\nYou have given a wrong choice:', user_status, ' Quitting the process')
-                sys.exit()
-        except TimeoutOccurred:
-            console.print('\nTimeout. Considering your response as No\n')
-            instruct_build_image(shell_params.the_image_type, shell_params.python_version)
-        except Exception:
-            console.print('\nTerminating the process')
+                    console.print(
+                        "[bright_blue]Please rebase your code before continuing.[/]\n"
+                        "Check this link to know more "
+                        "https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#id15\n"
+                    )
+                    console.print('[red]Exiting the process[/]\n')
+                    sys.exit(1)
+        elif user_status in ['n', 'no', 'N', 'No', 'NO']:
+            instruct_build_image(shell_params.python)
+        elif user_status in ['q', 'quit', 'Q', 'Quit', 'QUIT']:
+            console.print('\n[bright_yellow]Quitting the process[/]\n')
             sys.exit()
-
-
-def build_image_checks(verbose: bool, shell_params: ShellBuilder):
+        else:
+            console.print(
+                f'\n[red]You have given a wrong choice: {user_status}. Quitting the process[/]\n'
+            )
+            sys.exit()
+    except TimeoutOccurred:
+        console.print('\nTimeout. Considering your response as No\n')
+        instruct_build_image(shell_params.python)
+    except Exception as e:
+        console.print(f'\nTerminating the process on {e}')
+        sys.exit(1)
+
+
+def run_shell_with_build_image_checks(verbose: bool, dry_run: bool, shell_params: ShellParams):
+    """
+    Executes the shell command built from the params passed, checking first whether a build is needed.
+    * checks if there are enough resources to run the shell
+    * checks if the image was built at least once (if not - forces the build)
+    * if the build is not forced, checks if a build is needed and asks the user if so
+    * builds the image if needed
+    * prints information about the build
+    * constructs the docker compose command to enter the shell
+    * executes it
+
+    :param verbose: print commands when running
+    :param dry_run: do not execute "write" commands - just print what would happen
+    :param shell_params: parameters of the execution
+    """
+    check_docker_resources(verbose, shell_params.airflow_image_name)
     build_ci_image_check_cache = Path(
-        BUILD_CACHE_DIR, shell_params.airflow_branch, f".built_{shell_params.python_version}"
+        BUILD_CACHE_DIR, shell_params.airflow_branch, f".built_{shell_params.python}"
     )
     if build_ci_image_check_cache.exists():
-        console.print(f'{shell_params.the_image_type} image already built locally.')
+        console.print(f'[bright_blue]{shell_params.the_image_type} image already built locally.[/]')
     else:
-        console.print(f'{shell_params.the_image_type} image not built locally')
+        console.print(
+            f'[bright_yellow]{shell_params.the_image_type} image not built locally. Forcing build.[/]'
+        )
+        shell_params.force_build = True
 
     if not shell_params.force_build:
-        build_image_if_needed_steps(verbose, shell_params)
+        build_image_if_needed_steps(verbose, dry_run, shell_params)
     else:
         build_image(
             verbose,
-            python_version=shell_params.python_version,
+            dry_run=dry_run,
+            python=shell_params.python,
             upgrade_to_newer_dependencies="false",
         )
-
-    instruct_for_setup()
-    check_docker_resources(verbose, str(shell_params.airflow_sources), shell_params.airflow_ci_image_name)
-    cmd = ['docker-compose', 'run', '--service-ports', '--rm', 'airflow']
+    shell_params.print_badge_info()
+    cmd = ['docker-compose', 'run', '--service-ports', "-e", "BREEZE", '--rm', 'airflow']
     cmd_added = shell_params.command_passed
     env_variables = construct_env_variables_docker_compose_command(shell_params)
     if cmd_added is not None:
         cmd.extend(['-c', cmd_added])
-    if verbose:
-        shell_params.print_badge_info()
-    output = run_command(cmd, verbose=verbose, env=env_variables, text=True)
-    if verbose:
-        console.print(f"[blue]{output}[/]")
-
-
-def get_cached_params(user_params) -> Dict:
-    updated_params = dict(user_params)
-    for param in PARAMS_IN_CACHE:
-        if param in user_params:
-            param_name = PARAMS_IN_CACHE[param]
-            user_param_value = user_params[param]
+    run_command(cmd, verbose=verbose, dry_run=dry_run, env=env_variables, text=True)
+
+
+def synchronize_cached_params(parameters_passed_by_the_user: Dict[str, str]) -> Dict[str, str]:
+    """
+    Synchronizes cached params with the arguments passed by the user.
+
+    It reads missing parameters from the cache and writes values back whenever the user
+    actually provided new ones. It synchronizes all cacheable parameters.
+
+    :param parameters_passed_by_the_user: user args passed
+    :return: updated args
+    """
+    updated_params = dict(parameters_passed_by_the_user)
+    for param in VARIABLES_IN_CACHE:
+        if param in parameters_passed_by_the_user:
+            param_name = VARIABLES_IN_CACHE[param]
+            user_param_value = parameters_passed_by_the_user[param]
             if user_param_value is not None:
                 write_to_cache_file(param_name, user_param_value)
             else:
-                param_value = getattr(global_constants, DEFAULT_VALUES_FOR_PARAM[param])
-                _, user_param_value = check_cache_and_write_if_not_cached(param_name, param_value)
+                param_value = getattr(global_constants, SOURCE_OF_DEFAULT_VALUES_FOR_VARIABLES[param])
+                _, user_param_value = check_cached_value_is_allowed(param_name, param_value)
             updated_params[param] = user_param_value
     return updated_params
 
 
-def build_shell(verbose, **kwargs):
+def enter_shell(**kwargs):
+    """
+    Enters the Breeze shell using the parameters passed as kwargs:
+
+    * checks if docker version is good
+    * checks if docker-compose version is good
+    * updates kwargs with cached parameters
+    * displays ASCIIART and CHEATSHEET unless disabled
+    * builds ShellParams from the updated kwargs
+    * executes the command that drops the user into the Breeze shell
+
+    """
+    verbose = kwargs['verbose']
+    dry_run = kwargs['dry_run']
     check_docker_version(verbose)
     check_docker_compose_version(verbose)
-    updated_kwargs = get_cached_params(kwargs)
+    updated_kwargs = synchronize_cached_params(kwargs)
     if read_from_cache_file('suppress_asciiart') is None:
         console.print(ASCIIART, style=ASCIIART_STYLE)
     if read_from_cache_file('suppress_cheatsheet') is None:
         console.print(CHEATSHEET, style=CHEATSHEET_STYLE)
-    enter_shell_params = ShellBuilder(**filter_out_none(**updated_kwargs))
-    build_image_checks(verbose, enter_shell_params)
+    enter_shell_params = ShellParams(**filter_out_none(**updated_kwargs))
+    run_shell_with_build_image_checks(verbose, dry_run, enter_shell_params)
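
The verbose and dry_run flags threaded through every run_command call above come from the
shared helper in utils/run_utils.py, which this diff does not show; its core idea is roughly
the following sketch (details such as output capture and error suppression certainly differ):

    import shlex
    import subprocess
    from typing import List, Optional

    def run_command(
        cmd: List[str], *, verbose: bool = False, dry_run: bool = False, **kwargs
    ) -> Optional[subprocess.CompletedProcess]:
        if verbose or dry_run:
            # Print the command so the user can copy, tweak and re-run it manually.
            print(" ".join(shlex.quote(c) for c in cmd))
        if dry_run:
            return None  # "write" commands are only printed, never executed
        return subprocess.run(cmd, **kwargs)

    run_command(["echo", "hello breeze"], verbose=True, dry_run=True)
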
diff --git a/dev/breeze/src/airflow_breeze/shell/shell_builder.py b/dev/breeze/src/airflow_breeze/shell/shell_params.py
similarity index 73%
rename from dev/breeze/src/airflow_breeze/shell/shell_builder.py
rename to dev/breeze/src/airflow_breeze/shell/shell_params.py
index 051b2d3936..4975fa6b34 100644
--- a/dev/breeze/src/airflow_breeze/shell/shell_builder.py
+++ b/dev/breeze/src/airflow_breeze/shell/shell_params.py
@@ -14,49 +14,63 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+"""Breeze shell parameters."""
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Tuple
 
 from airflow_breeze.branch_defaults import AIRFLOW_BRANCH
-from airflow_breeze.console import console
-from airflow_breeze.global_constants import AVAILABLE_INTEGRATIONS, get_airflow_version
+from airflow_breeze.global_constants import (
+    ALLOWED_BACKENDS,
+    ALLOWED_MSSQL_VERSIONS,
+    ALLOWED_MYSQL_VERSIONS,
+    ALLOWED_POSTGRES_VERSIONS,
+    ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS,
+    AVAILABLE_INTEGRATIONS,
+    MOUNT_ALL,
+    MOUNT_SELECTED,
+    get_airflow_version,
+)
+from airflow_breeze.utils.console import console
 from airflow_breeze.utils.host_info_utils import get_host_group_id, get_host_user_id, get_stat_bin
-from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE, BUILD_CACHE_DIR, SCRIPTS_CI_DIR
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, BUILD_CACHE_DIR, SCRIPTS_CI_DIR
 from airflow_breeze.utils.run_utils import get_filesystem_type, run_command
 
 
 @dataclass
-class ShellBuilder:
-    python_version: str  # check in cache
-    build_cache_local: bool
-    build_cache_pulled: bool
-    build_cache_disabled: bool
-    backend: str  # check in cache
-    integration: Tuple[str]  # check in cache
-    postgres_version: str  # check in cache
-    mssql_version: str  # check in cache
-    mysql_version: str  # check in cache
-    force_build: bool
-    extra_args: Tuple
+class ShellParams:
+    """
+    Shell parameters. These parameters determine the command issued to run the shell.
+    """
+
+    verbose: bool
+    extra_args: Tuple = ()
+    force_build: bool = False
+    integration: Tuple[str, ...] = ()
+    postgres_version: str = ALLOWED_POSTGRES_VERSIONS[0]
+    mssql_version: str = ALLOWED_MSSQL_VERSIONS[0]
+    mysql_version: str = ALLOWED_MYSQL_VERSIONS[0]
+    backend: str = ALLOWED_BACKENDS[0]
+    python: str = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0]
+    dry_run: bool = False
+    load_example_dags: bool = False
+    load_default_connections: bool = False
     use_airflow_version: str = ""
     install_airflow_version: str = ""
     tag: str = "latest"
     github_repository: str = "apache/airflow"
-    skip_mounting_local_sources: bool = False
-    mount_all_local_sources: bool = False
+    mount_sources: str = MOUNT_SELECTED
     forward_credentials: str = "false"
     airflow_branch: str = AIRFLOW_BRANCH
-    executor: str = "KubernetesExecutor"  # check in cache
     start_airflow: str = "false"
     skip_twine_check: str = ""
-    use_packages_from_dist: str = "false"
     github_actions: str = ""
     issue_id: str = ""
     num_runs: str = ""
     version_suffix_for_pypi: str = ""
     version_suffix_for_svn: str = ""
+    db_reset: bool = False
+    ci: bool = False
 
     @property
     def airflow_version(self):
@@ -64,7 +78,7 @@ class ShellBuilder:
 
     @property
     def airflow_version_for_production_image(self):
-        cmd = ['docker', 'run', '--entrypoint', '/bin/bash', f'{self.airflow_prod_image_name}']
+        cmd = ['docker', 'run', '--entrypoint', '/bin/bash', f'{self.airflow_image_name}']
         cmd.extend(['-c', 'echo "${AIRFLOW_VERSION}"'])
         output = run_command(cmd, capture_output=True, text=True)
         return output.stdout.strip()
@@ -78,51 +92,29 @@ class ShellBuilder:
         return get_host_group_id()
 
     @property
-    def airflow_image_name(self) -> str:
+    def airflow_base_image_name(self) -> str:
         image = f'ghcr.io/{self.github_repository.lower()}'
         return image
 
     @property
-    def airflow_ci_image_name(self) -> str:
+    def airflow_image_name(self) -> str:
         """Construct CI image link"""
-        image = f'{self.airflow_image_name}/{self.airflow_branch}/ci/python{self.python_version}'
+        image = f'{self.airflow_base_image_name}/{self.airflow_branch}/ci/python{self.python}'
         return image
 
     @property
     def airflow_ci_image_name_with_tag(self) -> str:
-        image = self.airflow_ci_image_name
+        image = self.airflow_image_name
         return image if not self.tag else image + f":{self.tag}"
 
-    @property
-    def airflow_prod_image_name(self) -> str:
-        image = f'{self.airflow_image_name}/{self.airflow_branch}/prod/python{self.python_version}'
-        return image
-
     @property
     def airflow_image_kubernetes(self) -> str:
-        image = f'{self.airflow_image_name}/{self.airflow_branch}/kubernetes/python{self.python_version}'
+        image = f'{self.airflow_base_image_name}/{self.airflow_branch}/kubernetes/python{self.python}'
         return image
 
     @property
     def airflow_sources(self):
-        return AIRFLOW_SOURCE
-
-    @property
-    def docker_cache(self) -> str:
-        if self.build_cache_local:
-            docker_cache = "local"
-        elif self.build_cache_disabled:
-            docker_cache = "disabled"
-        else:
-            docker_cache = "pulled"
-        return docker_cache
-
-    @property
-    def mount_selected_local_sources(self) -> bool:
-        mount_selected_local_sources = True
-        if self.mount_all_local_sources or self.skip_mounting_local_sources:
-            mount_selected_local_sources = False
-        return mount_selected_local_sources
+        return AIRFLOW_SOURCES_ROOT
 
     @property
     def enabled_integrations(self) -> str:
@@ -139,14 +131,9 @@ class ShellBuilder:
         the_image_type = 'CI'
         return the_image_type
 
-    @property
-    def image_description(self) -> str:
-        image_description = 'Airflow CI'
-        return image_description
-
     @property
     def md5sum_cache_dir(self) -> Path:
-        cache_dir = Path(BUILD_CACHE_DIR, self.airflow_branch, self.python_version, self.the_image_type)
+        cache_dir = Path(BUILD_CACHE_DIR, self.airflow_branch, self.python, self.the_image_type)
         return cache_dir
 
     @property
@@ -170,7 +157,7 @@ class ShellBuilder:
         console.print(f'Branch Name: {self.airflow_branch}')
         console.print(f'Docker Image: {self.airflow_ci_image_name_with_tag}')
         console.print(f'Airflow source version:{self.airflow_version}')
-        console.print(f'Python Version: {self.python_version}')
+        console.print(f'Python Version: {self.python}')
         console.print(f'Backend: {self.backend} {self.backend_version}')
         console.print(f'Airflow used at runtime: {self.use_airflow_version}')
 
@@ -202,10 +189,13 @@ class ShellBuilder:
             [main_ci_docker_compose_file, backend_docker_compose_file, files_docker_compose_file]
         )
 
-        if self.mount_selected_local_sources:
-            compose_ci_file.extend([local_docker_compose_file, backend_port_docker_compose_file])
-        if self.mount_all_local_sources:
-            compose_ci_file.extend([local_all_sources_docker_compose_file, backend_port_docker_compose_file])
+        if self.mount_sources == MOUNT_SELECTED:
+            compose_ci_file.extend([local_docker_compose_file])
+        elif self.mount_sources == MOUNT_ALL:
+            compose_ci_file.extend([local_all_sources_docker_compose_file])
+        else:  # none
+            compose_ci_file.extend([remove_sources_docker_compose_file])
+        compose_ci_file.extend([backend_port_docker_compose_file])
         if self.forward_credentials:
             compose_ci_file.append(forward_credentials_docker_compose_file)
         if len(self.use_airflow_version) > 0:
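
Because the renamed ShellParams is a plain dataclass whose derived properties compute the
image names, it can be exercised directly. An illustrative session, assuming the default
AIRFLOW_BRANCH resolves to "main":

    from airflow_breeze.shell.shell_params import ShellParams

    params = ShellParams(verbose=True, python="3.9", backend="postgres")
    print(params.airflow_image_name)
    # ghcr.io/apache/airflow/main/ci/python3.9
    print(params.airflow_ci_image_name_with_tag)
    # ghcr.io/apache/airflow/main/ci/python3.9:latest
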
diff --git a/dev/breeze/src/airflow_breeze/utils/__init__.py b/dev/breeze/src/airflow_breeze/utils/__init__.py
index 13a83393a9..73561b8457 100644
--- a/dev/breeze/src/airflow_breeze/utils/__init__.py
+++ b/dev/breeze/src/airflow_breeze/utils/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Utils used internally by Breeze."""
diff --git a/dev/breeze/src/airflow_breeze/utils/cache.py b/dev/breeze/src/airflow_breeze/utils/cache.py
new file mode 100644
index 0000000000..bc22516c41
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/utils/cache.py
@@ -0,0 +1,141 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Some of the arguments (Python, backend, versions of the backend) are cached locally in the
+".build" folder so that the last used value is reused in a subsequent run if not specified.
+
+This means you do not have to remember which version of Python you used last if you just want
+to enter the shell with the same version as in the previous run.
+"""
+
+import sys
+from pathlib import Path
+from typing import Any, Dict, List, Optional, Tuple
+
+from airflow_breeze import global_constants
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.path_utils import BUILD_CACHE_DIR
+
+
+def check_if_cache_exists(param_name: str) -> bool:
+    return (Path(BUILD_CACHE_DIR) / f".{param_name}").exists()
+
+
+def read_from_cache_file(param_name: str) -> Optional[str]:
+    cache_exists = check_if_cache_exists(param_name)
+    if cache_exists:
+        return (Path(BUILD_CACHE_DIR) / f".{param_name}").read_text().strip()
+    else:
+        return None
+
+
+def touch_cache_file(param_name: str, root_dir: Path = BUILD_CACHE_DIR):
+    (Path(root_dir) / f".{param_name}").touch()
+
+
+def write_to_cache_file(param_name: str, param_value: str, check_allowed_values: bool = True) -> None:
+    """
+    Writes value to cache. If asked, it can also check whether the value is allowed for the parameter
+    and exit (instead of writing it) in case the value is not allowed for that parameter.
+    :param param_name: name of the parameter
+    :param param_value: new value for the parameter
+    :param check_allowed_values: whether to fail if the parameter value is not allowed for that name.
+    """
+    allowed = False
+    allowed_values = None
+    if check_allowed_values:
+        allowed, allowed_values = check_if_values_allowed(param_name, param_value)
+    if allowed or not check_allowed_values:
+        print('BUILD CACHE DIR:', BUILD_CACHE_DIR)
+        cache_path = Path(BUILD_CACHE_DIR, f".{param_name}")
+        cache_path.parent.mkdir(parents=True, exist_ok=True)
+        cache_path.write_text(param_value)
+    else:
+        console.print(f'[cyan]You have sent the {param_value} for {param_name}')
+        console.print(f'[cyan]Allowed value for the {param_name} are {allowed_values}')
+        console.print('[cyan]Provide one of the supported params. Write to cache dir failed')
+        sys.exit(1)
+
+
+def check_cached_value_is_allowed(param_name: str, default_param_value: str) -> Tuple[bool, Optional[str]]:
+    """
+    Checks if the cache is present and whether its value is valid according to current rules.
+    It could happen that the allowed values have been modified since the last time the cached value
+    was set, so this check is crucial to catch outdated values.
+    If the value is not set or in case the cached value stored is not currently allowed,
+    the default value is stored in the cache and returned instead.
+
+    :param param_name: name of the parameter
+    :param default_param_value: default value of the parameter
+    :return: Tuple informing whether the value was read from cache and the parameter value that is
+         set in the cache after this method returns.
+    """
+    is_from_cache = False
+    cached_value = read_from_cache_file(param_name)
+    if cached_value is None:
+        write_to_cache_file(param_name, default_param_value)
+        cached_value = default_param_value
+    else:
+        allowed, allowed_values = check_if_values_allowed(param_name, cached_value)
+        if allowed:
+            is_from_cache = True
+        else:
+            write_to_cache_file(param_name, default_param_value)
+            cached_value = default_param_value
+    return is_from_cache, cached_value
+
+
+def check_if_values_allowed(param_name: str, param_value: str) -> Tuple[bool, List[Any]]:
+    """Checks if parameter value is allowed by looking at global constants."""
+    allowed = False
+    allowed_values = getattr(global_constants, f'ALLOWED_{param_name.upper()}S')
+    if param_value in allowed_values:
+        allowed = True
+    return allowed, allowed_values
+
+
+def delete_cache(param_name: str) -> bool:
+    """Deletes value from cache. Returns true if the delete operation happened (i.e. cache was present)."""
+    deleted = False
+    if check_if_cache_exists(param_name):
+        (Path(BUILD_CACHE_DIR) / f".{param_name}").unlink()
+        deleted = True
+    return deleted
+
+
+def synchronize_parameters_with_cache(
+    image_params: Any, parameters_passed_via_command_line: Dict[str, str]
+) -> None:
+    """
+    Synchronizes cacheable parameters between executions. It reads values from the cache and updates
+    them when done. It is only done for parameters that are relevant for the image build.
+
+    :param image_params: parameters of the build
+    :param parameters_passed_via_command_line: parameters that were passed on the command line
+    """
+    cacheable_parameters = {
+        'python': 'PYTHON_MAJOR_MINOR_VERSION',
+    }
+    for parameter, cache_key in cacheable_parameters.items():
+        value_from_parameter = parameters_passed_via_command_line.get(parameter)
+        if value_from_parameter:
+            write_to_cache_file(cache_key, value_from_parameter, check_allowed_values=True)
+            setattr(image_params, parameter, value_from_parameter)
+        else:
+            is_cached, value = check_cached_value_is_allowed(cache_key, getattr(image_params, parameter))
+            if is_cached:
+                setattr(image_params, parameter, value)
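
A quick round trip through the new cache helpers; values land in dot-files under the ".build"
folder and "BACKEND" is validated against ALLOWED_BACKENDS from global_constants:

    from airflow_breeze.utils.cache import (
        check_cached_value_is_allowed,
        read_from_cache_file,
        write_to_cache_file,
    )

    write_to_cache_file("BACKEND", "postgres")  # stored as .build/.BACKEND
    print(read_from_cache_file("BACKEND"))      # postgres
    # The cached value is still allowed, so the sqlite default is ignored.
    is_from_cache, value = check_cached_value_is_allowed("BACKEND", "sqlite")
    print(is_from_cache, value)                 # True postgres
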
diff --git a/dev/breeze/src/airflow_breeze/docs_generator/doc_builder.py b/dev/breeze/src/airflow_breeze/utils/console.py
similarity index 54%
rename from dev/breeze/src/airflow_breeze/docs_generator/doc_builder.py
rename to dev/breeze/src/airflow_breeze/utils/console.py
index 4759c2c2d8..ab7679f0f0 100644
--- a/dev/breeze/src/airflow_breeze/docs_generator/doc_builder.py
+++ b/dev/breeze/src/airflow_breeze/utils/console.py
@@ -14,25 +14,18 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""
+Console used by all processes. We are forcing colors and terminal output, as Breeze is supposed
+to be run only in CI or a real development terminal - in both cases we want colors on.
+"""
+try:
+    from rich.console import Console
+    from rich.theme import Theme
 
-from dataclasses import dataclass
-from typing import List, Tuple
+    custom_theme = Theme({"info": "blue", "warning": "magenta", "error": "red"})
+    console = Console(force_terminal=True, color_system="standard", width=180, theme=custom_theme)
 
-
-@dataclass
-class DocBuilder:
-    package_filter: Tuple[str]
-    docs_only: bool
-    spellcheck_only: bool
-
-    @property
-    def args_doc_builder(self) -> List[str]:
-        doc_args = []
-        if self.docs_only:
-            doc_args.append("--docs-only")
-        if self.spellcheck_only:
-            doc_args.append("--spellcheck-only")
-        if self.package_filter and len(self.package_filter) > 0:
-            for single_filter in self.package_filter:
-                doc_args.extend(["--package-filter", single_filter])
-        return doc_args
+except ImportError:
+    # We handle the ImportError so that autocomplete works with just click installed
+    custom_theme = None  # type: ignore[assignment]
+    console = None  # type: ignore[assignment]
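
The try/except ImportError guard above keeps click autocomplete working when rich is not
installed; the same pattern generalizes to any optional dependency by falling back to None
and degrading gracefully at the use site. A self-contained sketch:

    try:
        from rich.console import Console
        console = Console(force_terminal=True, color_system="standard", width=180)
    except ImportError:
        # Autocomplete-only installs (just click) can still import this module.
        console = None

    def print_info(message: str) -> None:
        if console is None:
            print(message)  # plain fallback when rich is unavailable
        else:
            console.print(f"[blue]{message}[/]")

    print_info("Breeze is ready")
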
diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
index 33fcf72a42..70e5ebb302 100644
--- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -14,20 +14,44 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""Various utils to prepare docker and docker compose commands."""
+import os
 import re
 import subprocess
-from typing import List
+from typing import Dict, List, Union
 
-from packaging import version
+from airflow_breeze.build_image.ci.build_ci_params import BuildCiParams
+from airflow_breeze.build_image.prod.build_prod_params import BuildProdParams
+from airflow_breeze.shell.shell_params import ShellParams
+from airflow_breeze.utils.host_info_utils import get_host_os
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
 
-from airflow_breeze.console import console
+try:
+    from packaging import version
+except ImportError:
+    # We handle the ImportError so that autocomplete works with just click installed
+    version = None  # type: ignore[assignment]
+
+from airflow_breeze.branch_defaults import AIRFLOW_BRANCH, DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
 from airflow_breeze.global_constants import (
+    FLOWER_HOST_PORT,
     MIN_DOCKER_COMPOSE_VERSION,
     MIN_DOCKER_VERSION,
-    MOUNT_ALL_LOCAL_SOURCES,
-    MOUNT_SELECTED_LOCAL_SOURCES,
+    MOUNT_ALL,
+    MOUNT_NONE,
+    MOUNT_SELECTED,
+    MSSQL_HOST_PORT,
+    MSSQL_VERSION,
+    MYSQL_HOST_PORT,
+    MYSQL_VERSION,
+    POSTGRES_HOST_PORT,
+    POSTGRES_VERSION,
+    REDIS_HOST_PORT,
+    SSH_PORT,
+    WEBSERVER_HOST_PORT,
 )
-from airflow_breeze.utils.run_utils import run_command
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.run_utils import commit_sha, prepare_build_command, run_command
 
 NECESSARY_HOST_VOLUMES = [
     "/.bash_aliases:/root/.bash_aliases:cached",
@@ -52,7 +76,7 @@ NECESSARY_HOST_VOLUMES = [
     "/pyproject.toml:/opt/airflow/pyproject.toml:cached",
     "/pytest.ini:/opt/airflow/pytest.ini:cached",
     "/scripts:/opt/airflow/scripts:cached",
-    "/scripts/in_container/entrypoint_ci.sh:/entrypoint:cached",
+    "/scripts/docker/entrypoint_ci.sh:/entrypoint:cached",
     "/setup.cfg:/opt/airflow/setup.cfg:cached",
     "/setup.py:/opt/airflow/setup.py:cached",
     "/tests:/opt/airflow/tests:cached",
@@ -63,46 +87,61 @@ NECESSARY_HOST_VOLUMES = [
 ]
 
 
-def get_extra_docker_flags(all: bool, selected: bool, airflow_sources: str) -> List:
-    # get_extra_docker_flags(False, str(airflow_source))
-    # add verbosity
-    EXTRA_DOCKER_FLAGS = []
-    if all:
-        EXTRA_DOCKER_FLAGS.extend(["-v", f"{airflow_sources}:/opt/airflow/:cached"])
-    elif selected:
+def get_extra_docker_flags(mount_sources: str) -> List[str]:
+    """
+    Returns extra docker flags based on the type of mounting we want to do for sources.
+    :param mount_sources: type of mounting we want to have
+    :return: extra flags as a list of strings
+    """
+    extra_docker_flags = []
+    if mount_sources == MOUNT_ALL:
+        extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}:/opt/airflow/:cached"])
+    elif mount_sources == MOUNT_SELECTED:
         for flag in NECESSARY_HOST_VOLUMES:
-            EXTRA_DOCKER_FLAGS.extend(["-v", airflow_sources + flag])
-    else:
-        console.print('Skip mounting host volumes to Docker')
-    EXTRA_DOCKER_FLAGS.extend(["-v", f"{airflow_sources}/files:/files"])
-    EXTRA_DOCKER_FLAGS.extend(["-v", f"{airflow_sources}/dist:/dist"])
-    EXTRA_DOCKER_FLAGS.extend(["--rm"])
-    EXTRA_DOCKER_FLAGS.extend(["--env-file", f"{airflow_sources}/scripts/ci/docker-compose/_docker.env"])
-    return EXTRA_DOCKER_FLAGS
+            extra_docker_flags.extend(["-v", str(AIRFLOW_SOURCES_ROOT) + flag])
+    else:  # none
+        console.print('[bright_blue]Skip mounting host volumes to Docker[/]')
+    extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/files:/files"])
+    extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/dist:/dist"])
+    extra_docker_flags.extend(["--rm"])
+    extra_docker_flags.extend(["--env-file", f"{AIRFLOW_SOURCES_ROOT}/scripts/ci/docker-compose/_docker.env"])
+    return extra_docker_flags
 
 
-def check_docker_resources(verbose: bool, airflow_sources: str, airflow_ci_image_name: str):
-    extra_docker_flags = get_extra_docker_flags(
-        MOUNT_ALL_LOCAL_SOURCES, MOUNT_SELECTED_LOCAL_SOURCES, airflow_sources
-    )
+def check_docker_resources(verbose: bool, airflow_image_name: str):
+    """
+    Check if we have enough resources to run docker. This is done via running a script embedded in our image.
+    :param verbose: print commands when running
+    :param airflow_image_name: name of the airflow image to use.
+    """
+    extra_docker_flags = get_extra_docker_flags(MOUNT_NONE)
     cmd = []
     cmd.extend(["docker", "run", "-t"])
     cmd.extend(extra_docker_flags)
-    cmd.extend(["--entrypoint", "/bin/bash", airflow_ci_image_name])
+    cmd.extend(["--entrypoint", "/bin/bash", airflow_image_name])
     cmd.extend(["-c", "python /opt/airflow/scripts/in_container/run_resource_check.py"])
     run_command(cmd, verbose=verbose, text=True)
 
 
 def check_docker_permission(verbose) -> bool:
+    """
+    Checks if we have permission to write to docker socket. By default, on Linux you need to add your user
+    to docker group and some new users do not realize that. We help those users if we have
+    permission to run docker commands.
+
+    :param verbose: print commands when running
+    :return: True if permission is denied.
+    """
     permission_denied = False
     docker_permission_command = ["docker", "info"]
     try:
         _ = run_command(
             docker_permission_command,
             verbose=verbose,
-            suppress_console_print=True,
+            no_output_dump_on_exception=True,
             capture_output=True,
             text=True,
+            check=True,
         )
     except subprocess.CalledProcessError as ex:
         permission_denied = True
@@ -120,6 +159,13 @@ def compare_version(current_version: str, min_version: str) -> bool:
 
 
 def check_docker_version(verbose: bool):
+    """
+    Checks if the docker compose version is as expected (including some specific modifications done by
+    some vendors such as Microsoft (they might have modified version of docker-compose/docker in their
+    cloud. In case docker compose version is wrong we continue but print warning for the user.
+
+    :param verbose: print commands when running
+    """
     permission_denied = check_docker_permission(verbose)
     if not permission_denied:
         docker_version_command = ['docker', 'version', '--format', '{{.Client.Version}}']
@@ -127,7 +173,7 @@ def check_docker_version(verbose: bool):
         docker_version_output = run_command(
             docker_version_command,
             verbose=verbose,
-            suppress_console_print=True,
+            no_output_dump_on_exception=True,
             capture_output=True,
             text=True,
         )
@@ -150,12 +196,19 @@ def check_docker_version(verbose: bool):
 
 
 def check_docker_compose_version(verbose: bool):
+    """
+    Checks if the docker compose version is as expected (including some specific modifications done by
+    some vendors such as Microsoft (they might have modified version of docker-compose/docker in their
+    cloud. In case docker compose version is wrong we continue but print warning for the user.
+
+    :param verbose: print commands when running
+    """
     version_pattern = re.compile(r'(\d+)\.(\d+)\.(\d+)')
     docker_compose_version_command = ["docker-compose", "--version"]
     docker_compose_version_output = run_command(
         docker_compose_version_command,
         verbose=verbose,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
@@ -183,16 +236,193 @@ def check_docker_compose_version(verbose: bool):
         )
 
 
-def check_if_buildx_plugin_available(verbose: bool) -> bool:
-    is_buildx_available = False
-    check_buildx = ['docker', 'buildx', 'version']
-    docker_buildx_version_output = run_command(
-        check_buildx,
-        verbose=verbose,
-        suppress_console_print=True,
-        capture_output=True,
-        text=True,
+def construct_arguments_for_build_docker_command(
+    image_params: Union[BuildCiParams, BuildProdParams], required_args: List[str], optional_args: List[str]
+) -> List[str]:
+    """
+    Constructs docker compose command arguments list based on parameters passed
+    :param image_params: parameters of the image
+    :param required_args: build argument that are required
+    :param optional_args: build arguments that are optional (should not be used if missing or empty)
+    :return: list of `--build-arg` commands to use for the parameters passed
+    """
+    args_command = []
+    for param in required_args:
+        args_command.append("--build-arg")
+        args_command.append(param.upper() + "=" + str(getattr(image_params, param)))
+    for verify_param in optional_args:
+        param_value = str(getattr(image_params, verify_param))
+        if len(param_value) > 0:
+            args_command.append("--build-arg")
+            args_command.append(verify_param.upper() + "=" + param_value)
+    args_command.extend(image_params.docker_cache_ci_directive)
+    return args_command
+
+
+def construct_build_docker_command(
+    image_params: Union[BuildProdParams, BuildCiParams],
+    verbose: bool,
+    required_args: List[str],
+    optional_args: List[str],
+    production_image: bool,
+) -> List[str]:
+    """
+    Constructs docker compose command based on the parameters passed.
+    :param image_params: parameters of the image
+    :param verbose: print commands when running
+    :param required_args: build argument that are required
+    :param optional_args: build arguments that are optional (should not be used if missing or empty)
+    :param production_image: whether this is production image or ci image
+    :return: Command to run as list of string
+    """
+    arguments = construct_arguments_for_build_docker_command(
+        image_params, required_args=required_args, optional_args=optional_args
+    )
+    build_command = prepare_build_command(
+        prepare_buildx_cache=image_params.prepare_buildx_cache, verbose=verbose
     )
-    if docker_buildx_version_output.returncode == 0 and docker_buildx_version_output.stdout != '':
-        is_buildx_available = True
-    return is_buildx_available
+    build_flags = image_params.extra_docker_build_flags
+    final_command = []
+    final_command.extend(["docker"])
+    final_command.extend(build_command)
+    final_command.extend(build_flags)
+    final_command.extend(["--pull"])
+    final_command.extend(arguments)
+    final_command.extend(["-t", image_params.airflow_image_name, "--target", "main", "."])
+    final_command.extend(["-f", 'Dockerfile' if production_image else 'Dockerfile.ci'])
+    final_command.extend(["--platform", image_params.platform])
+    return final_command
+
+
+def set_value_to_default_if_not_set(env: Dict[str, str], name: str, default: str):
+    if env.get(name) is None:
+        env[name] = os.environ.get(name, default)
+
+
+def update_expected_environment_variables(env: Dict[str, str]) -> None:
+    """
+    Updates default values for unset environment variables.
+
+    :param env: environment variables to update with missing values if not set.
+    """
+    set_value_to_default_if_not_set(env, 'BREEZE', "true")
+    set_value_to_default_if_not_set(env, 'CI', "false")
+    set_value_to_default_if_not_set(env, 'CI_BUILD_ID', "0")
+    set_value_to_default_if_not_set(env, 'CI_EVENT_TYPE', "pull_request")
+    set_value_to_default_if_not_set(env, 'CI_JOB_ID', "0")
+    set_value_to_default_if_not_set(env, 'CI_TARGET_BRANCH', AIRFLOW_BRANCH)
+    set_value_to_default_if_not_set(env, 'CI_TARGET_REPO', "apache/airflow")
+    set_value_to_default_if_not_set(env, 'COMMIT_SHA', commit_sha())
+    set_value_to_default_if_not_set(env, 'DB_RESET', "false")
+    set_value_to_default_if_not_set(env, 'DEBIAN_VERSION', "bullseye")
+    set_value_to_default_if_not_set(env, 'DEFAULT_BRANCH', AIRFLOW_BRANCH)
+    set_value_to_default_if_not_set(env, 'DEFAULT_CONSTRAINTS_BRANCH', DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH)
+    set_value_to_default_if_not_set(env, 'ENABLED_SYSTEMS', "")
+    set_value_to_default_if_not_set(env, 'ENABLE_TEST_COVERAGE', "false")
+    set_value_to_default_if_not_set(env, 'GENERATE_CONSTRAINTS_MODE', "source-providers")
+    set_value_to_default_if_not_set(env, 'GITHUB_REGISTRY_PULL_IMAGE_TAG', "latest")
+    set_value_to_default_if_not_set(env, 'HOST_OS', get_host_os())
+    set_value_to_default_if_not_set(env, 'INIT_SCRIPT_FILE', "init.sh")
+    set_value_to_default_if_not_set(env, 'INSTALL_PROVIDERS_FROM_SOURCES', "true")
+    set_value_to_default_if_not_set(env, 'LIST_OF_INTEGRATION_TESTS_TO_RUN', "")
+    set_value_to_default_if_not_set(env, 'LOAD_DEFAULT_CONNECTIONS', "false")
+    set_value_to_default_if_not_set(env, 'LOAD_EXAMPLES', "false")
+    set_value_to_default_if_not_set(env, 'PACKAGE_FORMAT', "wheel")
+    set_value_to_default_if_not_set(env, 'PRINT_INFO_FROM_SCRIPTS', "true")
+    set_value_to_default_if_not_set(env, 'PYTHONDONTWRITEBYTECODE', "true")
+    set_value_to_default_if_not_set(env, 'RUN_SYSTEM_TESTS', "false")
+    set_value_to_default_if_not_set(env, 'RUN_TESTS', "false")
+    set_value_to_default_if_not_set(env, 'SKIP_ENVIRONMENT_INITIALIZATION', "false")
+    set_value_to_default_if_not_set(env, 'SKIP_SSH_SETUP', "false")
+    set_value_to_default_if_not_set(env, 'TEST_TYPE', "")
+    set_value_to_default_if_not_set(env, 'UPGRADE_TO_NEWER_DEPENDENCIES', "false")
+    set_value_to_default_if_not_set(env, 'USE_PACKAGES_FROM_DIST', "false")
+    set_value_to_default_if_not_set(env, 'VERBOSE', "false")
+    set_value_to_default_if_not_set(env, 'VERBOSE_COMMANDS', "false")
+    set_value_to_default_if_not_set(env, 'WHEEL_VERSION', "0.36.2")
+
+
+VARIABLES_TO_ENTER_DOCKER_COMPOSE = {
+    "AIRFLOW_CI_IMAGE": "airflow_image_name",
+    "AIRFLOW_CI_IMAGE_WITH_TAG": "airflow_ci_image_name_with_tag",
+    "AIRFLOW_IMAGE_KUBERNETES": "airflow_image_kubernetes",
+    "AIRFLOW_PROD_IMAGE": "airflow_image_name",
+    "AIRFLOW_SOURCES": "airflow_sources",
+    "AIRFLOW_VERSION": "airflow_version",
+    "BACKEND": "backend",
+    "COMPOSE_FILE": "compose_files",
+    "DB_RESET": 'db_reset',
+    "ENABLED_INTEGRATIONS": "enabled_integrations",
+    "GITHUB_ACTIONS": "github_actions",
+    "HOST_GROUP_ID": "host_group_id",
+    "HOST_USER_ID": "host_user_id",
+    "INSTALL_AIRFLOW_VERSION": "install_airflow_version",
+    "ISSUE_ID": "issue_id",
+    "LOAD_EXAMPLES": "load_example_dags",
+    "LOAD_DEFAULT_CONNECTIONS": "load_default_connections",
+    "NUM_RUNS": "num_runs",
+    "PYTHON_MAJOR_MINOR_VERSION": "python",
+    "SKIP_TWINE_CHECK": "skip_twine_check",
+    "SQLITE_URL": "sqlite_url",
+    "START_AIRFLOW": "start_airflow",
+    "USE_AIRFLOW_VERSION": "use_airflow_version",
+    "VERSION_SUFFIX_FOR_PYPI": "version_suffix_for_pypi",
+    "VERSION_SUFFIX_FOR_SVN": "version_suffix_for_svn",
+}
+
+VARIABLES_FOR_DOCKER_COMPOSE_CONSTANTS = {
+    "FLOWER_HOST_PORT": FLOWER_HOST_PORT,
+    "MSSQL_HOST_PORT": MSSQL_HOST_PORT,
+    "MSSQL_VERSION": MSSQL_VERSION,
+    "MYSQL_HOST_PORT": MYSQL_HOST_PORT,
+    "MYSQL_VERSION": MYSQL_VERSION,
+    "POSTGRES_HOST_PORT": POSTGRES_HOST_PORT,
+    "POSTGRES_VERSION": POSTGRES_VERSION,
+    "REDIS_HOST_PORT": REDIS_HOST_PORT,
+    "SSH_PORT": SSH_PORT,
+    "WEBSERVER_HOST_PORT": WEBSERVER_HOST_PORT,
+}
+
+VARIABLES_IN_CACHE = {
+    'backend': 'BACKEND',
+    'mssql_version': 'MSSQL_VERSION',
+    'mysql_version': 'MYSQL_VERSION',
+    'postgres_version': 'POSTGRES_VERSION',
+    'python': 'PYTHON_MAJOR_MINOR_VERSION',
+}
+
+SOURCE_OF_DEFAULT_VALUES_FOR_VARIABLES = {
+    'backend': 'DEFAULT_BACKEND',
+    'mssql_version': 'MSSQL_VERSION',
+    'mysql_version': 'MYSQL_VERSION',
+    'postgres_version': 'POSTGRES_VERSION',
+    'python': 'DEFAULT_PYTHON_MAJOR_MINOR_VERSION',
+}
+
+
+def construct_env_variables_docker_compose_command(shell_params: ShellParams) -> Dict[str, str]:
+    """
+    Constructs environment variables needed by the docker-compose command, based on Shell parameters
+    passed to it.
+
+    * It checks if appropriate params are defined for all the needed docker compose environment variables
+    * It sets the environment values from the parameters passed
+    * For constants that have no corresponding parameter, the constant value is only applied when
+      the variable is not already present in the environment we run with
+    * Updates all other environment variables that docker-compose expects with default values if missing
+
+    :param shell_params: shell parameters passed
+    :return: dictionary of env variables to set
+    """
+    env_variables: Dict[str, str] = os.environ.copy()
+    for param_name in VARIABLES_TO_ENTER_DOCKER_COMPOSE:
+        param_value = VARIABLES_TO_ENTER_DOCKER_COMPOSE[param_name]
+        env_variables[param_name] = str(getattr(shell_params, param_value))
+    for constant_param_name in VARIABLES_FOR_DOCKER_COMPOSE_CONSTANTS:
+        constant_param_value = VARIABLES_FOR_DOCKER_COMPOSE_CONSTANTS[constant_param_name]
+        if not env_variables.get(constant_param_name):
+            env_variables[constant_param_name] = str(constant_param_value)
+    update_expected_environment_variables(env_variables)
+    return env_variables
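
The precedence implemented above is: a value already present in the passed dict wins, then the
process environment, and only then the hard-coded default. A minimal standalone sketch of that
rule, assuming the same semantics as `set_value_to_default_if_not_set` (names below are
illustrative):

    import os
    from typing import Dict

    def set_default(env: Dict[str, str], name: str, default: str) -> None:
        # Keep an explicit value if present; otherwise inherit from the process
        # environment; fall back to the default only when neither is set.
        if env.get(name) is None:
            env[name] = os.environ.get(name, default)

    env: Dict[str, str] = {"DB_RESET": "true"}
    set_default(env, "DB_RESET", "false")  # explicit value kept -> "true"
    set_default(env, "CI", "false")        # os.environ["CI"] if set, else "false"
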
diff --git a/dev/breeze/src/airflow_breeze/utils/host_info_utils.py b/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
index 07a91fe964..3f34153de3 100644
--- a/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
@@ -14,22 +14,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""
+Returns information about the host that should be passed to docker-compose.
+"""
 import platform
 
 from airflow_breeze.utils.run_utils import run_command
 
-# DIRECTORIES_TO_FIX=(
-#             "/files"
-#             "/root/.aws"
-#             "/root/.azure"
-#             "/root/.config/gcloud"
-#             "/root/.docker"
-#             "/opt/airflow/logs"
-#             "/opt/airflow/docs"
-#             "/opt/airflow/dags"
-#             "${AIRFLOW_SOURCE}"
-#         )
-
 
 def get_host_user_id():
     host_user_id = ''
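
The `get_host_os` helper imported earlier (used to populate HOST_OS for docker-compose) boils
down to normalizing `platform.system()`. A minimal sketch of that normalization, assuming the
usual linux/darwin/windows buckets - the exact strings Breeze expects are not shown in this hunk:

    import platform

    def get_host_os() -> str:
        # platform.system() returns e.g. "Linux", "Darwin" or "Windows";
        # normalize to lowercase names for use in docker-compose environment files.
        return platform.system().lower()

    print(get_host_os())
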
diff --git a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
new file mode 100644
index 0000000000..37191a1ef6
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
@@ -0,0 +1,116 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Utilities to check - with MD5 - whether files have been modified since the last successful build.
+"""
+import hashlib
+from pathlib import Path
+from typing import List, Tuple
+
+from airflow_breeze.global_constants import FILES_FOR_REBUILD_CHECK
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
+
+
+def check_md5checksum_in_cache_modified(file_hash: str, cache_path: Path, update: bool) -> bool:
+    """
+    Check if the file hash is present in cache and it's content has been modified. Optionally updates
+    the hash.
+
+    :param file_hash: hash of the current version of the file
+    :param cache_path: path where the hash is stored
+    :param update: whether to update hash if it is found different
+    :return: True if the hash file was missing or hash has changed.
+    """
+    if cache_path.exists():
+        old_md5_checksum_content = Path(cache_path).read_text()
+        if old_md5_checksum_content.strip() != file_hash.strip():
+            if update:
+                save_md5_file(cache_path, file_hash)
+            return True
+    else:
+        if update:
+            save_md5_file(cache_path, file_hash)
+        return True
+    return False
+
+
+def generate_md5(filename, file_size: int = 65536):
+    """Generates md5 hash for the file."""
+    hash_md5 = hashlib.md5()
+    with open(filename, "rb") as f:
+        for file_chunk in iter(lambda: f.read(file_size), b""):
+            hash_md5.update(file_chunk)
+    return hash_md5.hexdigest()
+
+
+def calculate_md5_checksum_for_files(
+    md5sum_cache_dir: Path, update: bool = False
+) -> Tuple[List[str], List[str]]:
+    """
+    Calculates checksums for all interesting files and stores the hashes in the md5sum_cache_dir.
+    Optionally updates the stored hashes.
+
+    :param md5sum_cache_dir: directory where to store cached information
+    :param update: whether to update the hashes
+    :return: Tuple of two lists: modified and not-modified files
+    """
+    not_modified_files = []
+    modified_files = []
+    for calculate_md5_file in FILES_FOR_REBUILD_CHECK:
+        file_to_get_md5 = AIRFLOW_SOURCES_ROOT / calculate_md5_file
+        md5_checksum = generate_md5(file_to_get_md5)
+        sub_dir_name = file_to_get_md5.parts[-2]
+        actual_file_name = file_to_get_md5.parts[-1]
+        cache_file_name = Path(md5sum_cache_dir, sub_dir_name + '-' + actual_file_name + '.md5sum')
+        file_content = md5_checksum + '  ' + str(file_to_get_md5) + '\n'
+        is_modified = check_md5checksum_in_cache_modified(file_content, cache_file_name, update=update)
+        if is_modified:
+            modified_files.append(calculate_md5_file)
+        else:
+            not_modified_files.append(calculate_md5_file)
+    return modified_files, not_modified_files
+
+
+def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path, the_image_type: str) -> bool:
+    """
+    Checks if build is needed based on whether important files were modified.
+
+    :param md5sum_cache_dir: directory where cached md5 sums are stored
+    :param the_image_type: type of the image to check (PROD/CI)
+    :return: True if build is needed.
+    """
+    build_needed = False
+    modified_files, not_modified_files = calculate_md5_checksum_for_files(md5sum_cache_dir, update=False)
+    if len(modified_files) > 0:
+        console.print(
+            '[bright_yellow]The following files are modified since the last time the image was built: [/]\n\n'
+        )
+        for file in modified_files:
+            console.print(f" * [bright_blue]{file}[/]")
+        console.print(f'\n[bright_yellow]Likely {the_image_type} image needs rebuild[/]\n')
+        build_needed = True
+    else:
+        console.print(
+            f'Docker image build is not needed for {the_image_type} build as no important files have changed!'
+        )
+    return build_needed
+
+
+def save_md5_file(cache_path: Path, file_content: str) -> None:
+    cache_path.parent.mkdir(parents=True, exist_ok=True)
+    cache_path.write_text(file_content)
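
The rebuild check above boils down to: hash every watched file, compare against the hash
remembered from the last successful build, and suggest a rebuild when any of them differ.
A minimal sketch of that mechanism under the same assumptions (paths are illustrative):

    import hashlib
    from pathlib import Path

    def file_md5(path: Path, chunk_size: int = 65536) -> str:
        # Stream the file in chunks so large files need not fit in memory.
        digest = hashlib.md5()
        with path.open("rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def is_modified(source: Path, cache_file: Path) -> bool:
        # True (and the cache is refreshed) when the file changed since
        # the hash was last stored.
        current = file_md5(source)
        if cache_file.exists() and cache_file.read_text().strip() == current:
            return False
        cache_file.parent.mkdir(parents=True, exist_ok=True)
        cache_file.write_text(current)
        return True
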
diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py
index 81c08aa7bf..6ce3f34def 100644
--- a/dev/breeze/src/airflow_breeze/utils/path_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -14,66 +14,83 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""
+Useful tools for various Paths used inside Airflow Sources.
+"""
+
 import os
 import tempfile
 from pathlib import Path
 from typing import Optional
 
-from airflow_breeze.console import console
-
-__AIRFLOW_SOURCES_ROOT = Path.cwd()
-
-__AIRFLOW_CFG_FILE = "setup.cfg"
-
+from airflow_breeze.utils.console import console
 
-def get_airflow_sources_root():
-    return __AIRFLOW_SOURCES_ROOT
+AIRFLOW_CFG_FILE = "setup.cfg"
 
 
 def search_upwards_for_airflow_sources_root(start_from: Path) -> Optional[Path]:
     root = Path(start_from.root)
     d = start_from
     while d != root:
-        attempt = d / __AIRFLOW_CFG_FILE
+        attempt = d / AIRFLOW_CFG_FILE
         if attempt.exists() and "name = apache-airflow\n" in attempt.read_text():
             return attempt.parent
         d = d.parent
     return None
 
 
-def find_airflow_sources_root():
+def find_airflow_sources_root() -> Path:
+    """
+    Find the root of airflow sources. When Breeze is run from sources, it is easy, but this one also
+    has to handle the case when Breeze is installed via `pipx` so it searches upwards of the current
+    directory to find the right root of airflow directory.
+
+    If not found, current directory is returned (this handles the case when Breeze is run from the local
+    directory.
+
+    :return: Path for the found sources.
+
+    """
+    default_airflow_sources_root = Path.cwd()
     # Try to find airflow sources in current working dir
     airflow_sources_root = search_upwards_for_airflow_sources_root(Path.cwd())
     if not airflow_sources_root:
         # Or if it fails, find it in parents of the directory where the ./breeze.py is.
         airflow_sources_root = search_upwards_for_airflow_sources_root(Path(__file__).resolve().parent)
-    global __AIRFLOW_SOURCES_ROOT
     if airflow_sources_root:
-        __AIRFLOW_SOURCES_ROOT = airflow_sources_root
+        os.chdir(airflow_sources_root)
+        return Path(airflow_sources_root)
     else:
-        console.print(f"\n[yellow]Could not find Airflow sources location. Assuming {__AIRFLOW_SOURCES_ROOT}")
-    os.chdir(__AIRFLOW_SOURCES_ROOT)
+        console.print(
+            f"\n[bright_yellow]Could not find Airflow sources location. "
+            f"Assuming {default_airflow_sources_root}"
+        )
+    os.chdir(default_airflow_sources_root)
+    return Path(default_airflow_sources_root)
+
 
+AIRFLOW_SOURCES_ROOT = find_airflow_sources_root()
 
-find_airflow_sources_root()
-AIRFLOW_SOURCE = get_airflow_sources_root()
-BUILD_CACHE_DIR = Path(AIRFLOW_SOURCE, '.build')
-FILES_DIR = Path(AIRFLOW_SOURCE, 'files')
-MSSQL_DATA_VOLUME = Path(BUILD_CACHE_DIR, 'tmp_mssql_volume')
-MYPY_CACHE_DIR = Path(AIRFLOW_SOURCE, '.mypy_cache')
-LOGS_DIR = Path(AIRFLOW_SOURCE, 'logs')
-DIST_DIR = Path(AIRFLOW_SOURCE, 'dist')
-SCRIPTS_CI_DIR = Path(AIRFLOW_SOURCE, 'scripts', 'ci')
-DOCKER_CONTEXT_DIR = Path(AIRFLOW_SOURCE, 'docker-context-files')
+BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / '.build'
+FILES_DIR = AIRFLOW_SOURCES_ROOT / 'files'
+MSSQL_DATA_VOLUME = AIRFLOW_SOURCES_ROOT / 'tmp_mssql_volume'
+MYPY_CACHE_DIR = AIRFLOW_SOURCES_ROOT / '.mypy_cache'
+LOGS_DIR = AIRFLOW_SOURCES_ROOT / 'logs'
+DIST_DIR = AIRFLOW_SOURCES_ROOT / 'dist'
+SCRIPTS_CI_DIR = AIRFLOW_SOURCES_ROOT / 'scripts' / 'ci'
+DOCKER_CONTEXT_DIR = AIRFLOW_SOURCES_ROOT / 'docker-context-files'
+CACHE_TMP_FILE_DIR = tempfile.TemporaryDirectory()
+OUTPUT_LOG = Path(CACHE_TMP_FILE_DIR.name, 'out.log')
 
 
-def create_directories():
+def create_directories() -> None:
+    """
+    Creates all directories that are needed for Breeze to work.
+    """
     BUILD_CACHE_DIR.mkdir(parents=True, exist_ok=True)
     FILES_DIR.mkdir(parents=True, exist_ok=True)
     MSSQL_DATA_VOLUME.mkdir(parents=True, exist_ok=True)
     MYPY_CACHE_DIR.mkdir(parents=True, exist_ok=True)
     LOGS_DIR.mkdir(parents=True, exist_ok=True)
     DIST_DIR.mkdir(parents=True, exist_ok=True)
-    CACHE_TMP_FILE_DIR = tempfile.TemporaryDirectory()
-    OUTPUT_LOG = Path(CACHE_TMP_FILE_DIR.name, 'out.log')
     OUTPUT_LOG.mkdir(parents=True, exist_ok=True)
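
The upward search used by `find_airflow_sources_root` is what makes Breeze work when it is
installed via `pipx` and invoked from a subdirectory: walk from the current directory toward the
filesystem root until a marker file is found. The same pattern in isolation - the marker check
here is simplified, as the real code also verifies `name = apache-airflow` inside setup.cfg:

    from pathlib import Path
    from typing import Optional

    def find_project_root(start: Path, marker: str = "setup.cfg") -> Optional[Path]:
        # Walk up the directory tree until the marker is found or "/" is reached.
        current = start.resolve()
        while current != current.parent:
            if (current / marker).exists():
                return current
            current = current.parent
        return None

    root = find_project_root(Path.cwd())
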
diff --git a/dev/breeze/src/airflow_breeze/utils/registry.py b/dev/breeze/src/airflow_breeze/utils/registry.py
new file mode 100644
index 0000000000..83323649b7
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/utils/registry.py
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+from typing import Any
+
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.run_utils import run_command
+
+
+def login_to_docker_registry(image_params: Any):
+    """
+    In a CI environment, we need to log in to GitHub Registry if we want to prepare the cache.
+    This method logs in using the params specified.
+
+    :param image_params: parameters to use for building the image
+    """
+    if os.environ.get("CI"):
+        if len(image_params.github_token) == 0:
+            console.print("\n[bright_blue]Skip logging in to GitHub Registry. No Token available!")
+        elif image_params.airflow_login_to_github_registry != "true":
+            console.print(
+                "\n[bright_blue]Skip logging in to GitHub Registry.\
+                    AIRFLOW_LOGIN_TO_GITHUB_REGISTRY is set as false"
+            )
+        else:
+            run_command(['docker', 'logout', 'ghcr.io'], verbose=True, text=True)
+            run_command(
+                [
+                    'docker',
+                    'login',
+                    '--username',
+                    image_params.github_username,
+                    '--password-stdin',
+                    'ghcr.io',
+                ],
+                verbose=True,
+                text=True,
+                input=image_params.github_token,
+            )
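
Passing the token via `--password-stdin` rather than `--password` keeps the secret out of the
process list and shell history; `run_command` forwards the `input=` keyword to `subprocess.run`
for exactly this purpose. The equivalent call with the standard library alone (the token value
is a placeholder):

    import subprocess

    token = "<github-token>"  # placeholder - never hard-code a real token
    subprocess.run(
        ["docker", "login", "--username", "my-user", "--password-stdin", "ghcr.io"],
        input=token,  # delivered on stdin, never visible in `ps` output
        text=True,
        check=True,
    )
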
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index 8553e3a8f2..da0ceeeb7e 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -14,23 +14,20 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+"""Useful tools for running commands."""
 import contextlib
-import hashlib
 import os
-import re
 import shlex
 import shutil
 import stat
 import subprocess
-from copy import deepcopy
+import sys
+from functools import lru_cache
 from pathlib import Path
 from typing import Dict, List, Mapping, Optional
 
-from airflow_breeze.cache import update_md5checksum_in_cache
-from airflow_breeze.console import console
-from airflow_breeze.global_constants import FILES_FOR_REBUILD_CHECK
-from airflow_breeze.utils.path_utils import AIRFLOW_SOURCE
+from airflow_breeze.utils.console import console
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
 
 
 def run_command(
@@ -38,67 +35,115 @@ def run_command(
     *,
     check: bool = True,
     verbose: bool = False,
-    suppress_raise_exception: bool = False,
-    suppress_console_print: bool = False,
+    dry_run: bool = False,
+    no_output_dump_on_exception: bool = False,
     env: Optional[Mapping[str, str]] = None,
     cwd: Optional[Path] = None,
     **kwargs,
-):
+) -> Optional[subprocess.CompletedProcess]:
+    """
+    Runs command passed as list of strings with some extra functionality over POpen (kwargs from PoPen can
+    be used in this command even if not explicitly specified).
+
+    It prints diagnostics when requested, also allows to "dry_run" the commands rather than actually
+    execute them.
+
+    An important factor for having this command running tool is to be able (in verbose mode) to directly
+    copy&paste the verbose output and run the command manually - including all the environment variables
+    needed to run the command.
+
+    :param cmd: command to run
+    :param check: whether to check status value and run exception (same as POpem)
+    :param verbose: print commands when running
+    :param dry_run: do not execute "the" command - just print what would happen
+    :param no_output_dump_on_exception: whether to suppress printing logs from output when command fails
+    :param env: mapping of environment variables to set for the run command
+    :param cwd: working directory to set for the command
+    :param kwargs: kwargs passed to POpen
+    """
     workdir: str = str(cwd) if cwd else os.getcwd()
-    if verbose:
+    if verbose or dry_run:
         command_to_print = ' '.join(shlex.quote(c) for c in cmd)
         # if we pass environment variables to execute, then
         env_to_print = ' '.join(f'{key}="{val}"' for (key, val) in env.items()) if env else ''
-        console.print(f"\n[blue]Working directory {workdir} [/]\n")
+        if env_to_print:
+            env_to_print += ' '
+        console.print(f"\n[bright_blue]Working directory {workdir} [/]\n")
         # Soft wrap allows to copy&paste and run resulting output as it has no hard EOL
-        console.print(f"\n[blue]{env_to_print} {command_to_print}[/]\n", soft_wrap=True)
-
+        console.print(f"\n[bright_blue]{env_to_print}{command_to_print}[/]\n", soft_wrap=True)
+        if dry_run:
+            return None
     try:
-        # copy existing environment variables
-        cmd_env = deepcopy(os.environ)
+        cmd_env = os.environ.copy()
         if env:
-            # Add environment variables passed as parameters
             cmd_env.update(env)
         return subprocess.run(cmd, check=check, env=cmd_env, cwd=workdir, **kwargs)
     except subprocess.CalledProcessError as ex:
-        if not suppress_console_print:
-            console.print("========================= OUTPUT start ============================")
-            console.print(ex.stderr)
-            console.print(ex.stdout)
-            console.print("========================= OUTPUT end ============================")
-        if not suppress_raise_exception:
+        if not no_output_dump_on_exception:
+            if ex.stdout:
+                console.print("[blue]========================= OUTPUT start ============================[/]")
+                console.print(ex.stdout)
+                console.print("[blue]========================= OUTPUT end ==============================[/]")
+            if ex.stderr:
+                console.print("[red]========================= STDERR start ============================[/]")
+                console.print(ex.stderr)
+                console.print("[red]========================= STDERR end ==============================[/]")
+        if check:
             raise
+    return None
 
 
-def generate_md5(filename, file_size: int = 65536):
-    hash_md5 = hashlib.md5()
-    with open(filename, "rb") as f:
-        for file_chunk in iter(lambda: f.read(file_size), b""):
-            hash_md5.update(file_chunk)
-    return hash_md5.hexdigest()
-
-
-def filter_out_none(**kwargs) -> Dict:
-    for key in list(kwargs):
-        if kwargs[key] is None:
-            kwargs.pop(key)
-    return kwargs
+def check_pre_commit_installed(verbose: bool) -> bool:
+    """
+    Check if pre-commit is installed in the right version.
+
+    :param verbose: print commands when running
+    :return: True if pre-commit is installed in the right version.
+    """
+    # Local import to make autocomplete work
+    import yaml
+    from pkg_resources import parse_version
 
+    pre_commit_config = yaml.safe_load((AIRFLOW_SOURCES_ROOT / ".pre-commit-config.yaml").read_text())
+    min_pre_commit_version = pre_commit_config["minimum_pre_commit_version"]
 
-def check_package_installed(package_name: str) -> bool:
+    pre_commit_name = "pre-commit"
     is_installed = False
-    if shutil.which('pre-commit') is not None:
-        is_installed = True
-        console.print(f"\n[blue]Package name {package_name} is installed to run static check test")
-    else:
-        console.print(
-            f"\n[red]Error: Package name {package_name} is not installed. \
-            Please install using https://pre-commit.com/#install to continue[/]\n"
+    if shutil.which(pre_commit_name) is not None:
+        process = run_command(
+            [pre_commit_name, "--version"], verbose=verbose, check=True, capture_output=True, text=True
         )
+        if process and process.stdout:
+            pre_commit_version = process.stdout.split(" ")[-1].strip()
+            if parse_version(pre_commit_version) >= parse_version(min_pre_commit_version):
+                console.print(
+                    f"\n[green]Package {pre_commit_name} is installed. "
+                    f"Good version {pre_commit_version} (>= {min_pre_commit_version})[/]\n"
+                )
+                is_installed = True
+            else:
+                console.print(
+                    f"\n[red]Package name {pre_commit_name} version is wrong. It should be"
+                    f"aat least {min_pre_commit_version} and is {pre_commit_version}.[/]\n\n"
+                )
+        else:
+            console.print(
+                "\n[bright_yellow]Could not determine version of pre-commit. "
+                "You might need to update it![/]\n"
+            )
+            is_installed = True
+    else:
+        console.print(f"\n[red]Error: Package name {pre_commit_name} is not installed.[/]")
+    if not is_installed:
+        console.print("\nPlease install using https://pre-commit.com/#install to continue\n")
     return is_installed
 
 
 def get_filesystem_type(filepath):
+    """
+    Determine the type of filesystem used - we might want to use different parameters if tmpfs is used.
+
+    :param filepath: path to check
+    :return: type of filesystem
+    """
     # We import it locally so that click autocomplete works
     import psutil
 
@@ -113,60 +158,21 @@ def get_filesystem_type(filepath):
     return root_type
 
 
-def calculate_md5_checksum_for_files(md5sum_cache_dir: Path):
-    not_modified_files = []
-    modified_files = []
-    for calculate_md5_file in FILES_FOR_REBUILD_CHECK:
-        file_to_get_md5 = Path(AIRFLOW_SOURCE, calculate_md5_file)
-        md5_checksum = generate_md5(file_to_get_md5)
-        sub_dir_name = file_to_get_md5.parts[-2]
-        actual_file_name = file_to_get_md5.parts[-1]
-        cache_file_name = Path(md5sum_cache_dir, sub_dir_name + '-' + actual_file_name + '.md5sum')
-        file_content = md5_checksum + '  ' + str(file_to_get_md5) + '\n'
-        is_modified = update_md5checksum_in_cache(file_content, cache_file_name)
-        if is_modified:
-            modified_files.append(calculate_md5_file)
-        else:
-            not_modified_files.append(calculate_md5_file)
-    return modified_files, not_modified_files
-
-
-def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path, the_image_type: str) -> bool:
-    build_needed = False
-    modified_files, not_modified_files = calculate_md5_checksum_for_files(md5sum_cache_dir)
-    if len(modified_files) > 0:
-        console.print('The following files are modified: ', modified_files)
-        console.print(f'Likely {the_image_type} image needs rebuild')
-        build_needed = True
-    else:
-        console.print(
-            f'Docker image build is not needed for {the_image_type} build as no important files are changed!'
-        )
-    return build_needed
-
-
-def instruct_build_image(the_image_type: str, python_version: str):
-    console.print(f'\nThe {the_image_type} image for python version {python_version} may be outdated\n')
+def instruct_build_image(python: str):
+    """Print instructions to the user that they should build the image"""
+    console.print(f'\n[bright_yellow]The CI image for python version {python} may be outdated[/]\n')
     console.print('Please run this command at earliest convenience:\n')
-    if the_image_type == 'CI':
-        console.print(f'./Breeze2 build-ci-image --python {python_version}')
-    else:
-        console.print(f'./Breeze2 build-prod-image --python {python_version}')
-    console.print("\nIf you run it via pre-commit as individual hook, you can run 'pre-commit run build'.\n")
-
-
-def instruct_for_setup():
-    CMDNAME = 'Breeze2'
-    console.print(f"\nYou can setup autocomplete by running {CMDNAME} setup-autocomplete'")
-    console.print("  You can toggle ascii/cheatsheet by running:")
-    console.print(f"      * {CMDNAME} toggle-suppress-cheatsheet")
-    console.print(f"      * {CMDNAME} toggle-suppress-asciiart\n")
+    console.print(f'      `./Breeze2 build-image --python {python}`\n')
 
 
 @contextlib.contextmanager
 def working_directory(source_path: Path):
+    """
     # Equivalent of pushd and popd in bash script.
     # https://stackoverflow.com/a/42441759/3101838
+    :param source_path:
+    :return:
+    """
     prev_cwd = Path.cwd()
     os.chdir(source_path)
     try:
@@ -176,6 +182,7 @@ def working_directory(source_path: Path):
 
 
 def change_file_permission(file_to_fix: Path):
+    """Update file permissions to not be group-writeable. Needed to solve cache invalidation problems."""
     if file_to_fix.exists():
         current = stat.S_IMODE(os.stat(file_to_fix).st_mode)
         new = current & ~stat.S_IWGRP & ~stat.S_IWOTH  # Removes group/other write permission
@@ -183,6 +190,7 @@ def change_file_permission(file_to_fix: Path):
 
 
 def change_directory_permission(directory_to_fix: Path):
+    """Update directory permissions to not be group-writeable. Needed to solve cache invalidation problems."""
     if directory_to_fix.exists():
         current = stat.S_IMODE(os.stat(directory_to_fix).st_mode)
         new = current & ~stat.S_IWGRP & ~stat.S_IWOTH  # Removes group/other write permission
@@ -192,9 +200,10 @@ def change_directory_permission(directory_to_fix: Path):
         os.chmod(directory_to_fix, new)
 
 
-@working_directory(AIRFLOW_SOURCE)
+@working_directory(AIRFLOW_SOURCES_ROOT)
 def fix_group_permissions():
-    console.print("[blue]Fixing group permissions[/]")
+    """Fixes permissions of all the files and directories that have group-write access."""
+    console.print("[bright_blue]Fixing group permissions[/]")
     files_to_fix_result = run_command(['git', 'ls-files', './'], capture_output=True, text=True)
     if files_to_fix_result.returncode == 0:
         files_to_fix = files_to_fix_result.stdout.strip().split('\n')
@@ -209,29 +218,95 @@ def fix_group_permissions():
             change_directory_permission(Path(directory_to_fix))
 
 
-def get_latest_sha(repo: str, branch: str):
+def is_repo_rebased(repo: str, branch: str):
+    """Returns True if the local branch contains latest remote SHA (i.e. if it is rebased)"""
     # We import it locally so that click autocomplete works
     import requests
 
     gh_url = f"https://api.github.com/repos/{repo}/commits/{branch}"
     headers_dict = {"Accept": "application/vnd.github.VERSION.sha"}
-    resp = requests.get(gh_url, headers=headers_dict)
-    return resp.text
-
-
-def is_repo_rebased(latest_sha: str):
+    latest_sha = requests.get(gh_url, headers=headers_dict).text.strip()
     rebased = False
-    output = run_command(['git', 'log', '--format=format:%H'], capture_output=True, text=True)
-    output = output.stdout.strip().splitlines()
+    process = run_command(['git', 'log', '--format=format:%H'], capture_output=True, text=True)
+    output = process.stdout.strip().splitlines() if process is not None else []
     if latest_sha in output:
         rebased = True
     return rebased
 
 
-def is_multi_platform(value: str) -> bool:
-    is_multi_platform = False
-    platform_pattern = re.compile('^[0-9a-zA-Z]+,[0-9a-zA-Z]+$')
-    platform_found = platform_pattern.search(value)
-    if platform_found is not None:
-        is_multi_platform = True
-    return is_multi_platform
+def check_if_buildx_plugin_installed(verbose: bool) -> bool:
+    """
+    Checks if the buildx plugin is locally available.
+
+    :param verbose: print commands when running
+    :return: True if the buildx plugin is installed.
+    """
+    is_buildx_available = False
+    check_buildx = ['docker', 'buildx', 'version']
+    docker_buildx_version_process = run_command(
+        check_buildx,
+        verbose=verbose,
+        no_output_dump_on_exception=True,
+        capture_output=True,
+        text=True,
+    )
+    if (
+        docker_buildx_version_process
+        and docker_buildx_version_process.returncode == 0
+        and docker_buildx_version_process.stdout != ''
+    ):
+        is_buildx_available = True
+    return is_buildx_available
+
+
+def prepare_build_command(prepare_buildx_cache: bool, verbose: bool) -> List[str]:
+    """
+    Prepare build command for docker build. Depending on whether we have buildx plugin installed or not,
+    and whether we run cache preparation, there might be different results:
+
+    * if buildx plugin is installed - `docker buildx` command is returned - using regular or cache builder
+      depending on whether we build regular image or cache
+    * if no buildx plugin is installed, and we do not prepare cache, regular docker `build` command is used.
+    * if no buildx plugin is installed, and we prepare cache - we fail. Cache can only be done with buildx
+
+    :param prepare_buildx_cache: whether we are preparing buildx cache.
+    :param verbose: print commands when running
+    :return: command to use as docker build command
+    """
+    build_command_param = []
+    is_buildx_available = check_if_buildx_plugin_installed(verbose=verbose)
+    if is_buildx_available:
+        if prepare_buildx_cache:
+            build_command_param.extend(["buildx", "build", "--builder", "airflow_cache", "--progress=tty"])
+            cmd = ['docker', 'buildx', 'inspect', 'airflow_cache']
+            process = run_command(cmd, verbose=True, text=True, check=False)
+            if process and process.returncode != 0:
+                next_cmd = ['docker', 'buildx', 'create', '--name', 'airflow_cache']
+                run_command(next_cmd, verbose=True, text=True, check=False)
+        else:
+            build_command_param.extend(["buildx", "build", "--builder", "default", "--progress=tty"])
+    else:
+        if prepare_buildx_cache:
+            console.print(
+                '\n[red] Buildx cli plugin is not available and you need it to prepare buildx cache. \n'
+            )
+            console.print(
+                '[red] Please install it following https://docs.docker.com/buildx/working-with-buildx/ \n'
+            )
+            sys.exit(1)
+        build_command_param.append("build")
+    return build_command_param
+
+
+@lru_cache(maxsize=None)
+def commit_sha():
+    """Returns commit SHA of current repo. Cached for various usages."""
+    return run_command(
+        ['git', 'rev-parse', 'HEAD'], capture_output=True, text=True, check=False
+    ).stdout.strip()
+
+
+def filter_out_none(**kwargs) -> Dict[str, str]:
+    """Filters out all None values from parameters passed."""
+    for key in list(kwargs):
+        if kwargs[key] is None:
+            kwargs.pop(key)
+    return kwargs
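
In practice the value of `run_command` is that the verbose output (env assignments plus a
shell-quoted command on one soft-wrapped line) can be copied and run manually, while
`dry_run=True` prints the same line and returns None without executing anything. A sketch of
how a caller would typically drive it (the flags shown are illustrative):

    # Hypothetical caller: prints the working directory and a copy-pasteable
    # command line, then runs the command and captures its output.
    result = run_command(
        ["docker", "ps", "--format", "{{.Names}}"],
        verbose=True,
        dry_run=False,
        capture_output=True,
        text=True,
    )
    if result is not None:
        print(result.stdout)
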
diff --git a/dev/breeze/src/airflow_breeze/utils/visuals.py b/dev/breeze/src/airflow_breeze/utils/visuals.py
new file mode 100644
index 0000000000..b5148a2c74
--- /dev/null
+++ b/dev/breeze/src/airflow_breeze/utils/visuals.py
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+Visuals displayed to the user when entering Breeze shell.
+"""
+
+from airflow_breeze.global_constants import (
+    FLOWER_HOST_PORT,
+    MSSQL_HOST_PORT,
+    MYSQL_HOST_PORT,
+    POSTGRES_HOST_PORT,
+    REDIS_HOST_PORT,
+    SSH_PORT,
+    WEBSERVER_HOST_PORT,
+)
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT
+
+ASCIIART = """
+
+
+
+
+                                  @&&&&&&@
+                                 @&&&&&&&&&&&@
+                                &&&&&&&&&&&&&&&&
+                                        &&&&&&&&&&
+                                            &&&&&&&
+                                             &&&&&&&
+                           @@@@@@@@@@@@@@@@   &&&&&&
+                          @&&&&&&&&&&&&&&&&&&&&&&&&&&
+                         &&&&&&&&&&&&&&&&&&&&&&&&&&&&
+                                         &&&&&&&&&&&&
+                                             &&&&&&&&&
+                                           &&&&&&&&&&&&
+                                      @@&&&&&&&&&&&&&&&@
+                   @&&&&&&&&&&&&&&&&&&&&&&&&&&&&  &&&&&&
+                  &&&&&&&&&&&&&&&&&&&&&&&&&&&&    &&&&&&
+                 &&&&&&&&&&&&&&&&&&&&&&&&         &&&&&&
+                                                 &&&&&&
+                                               &&&&&&&
+                                            @&&&&&&&&
+            @&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+           &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+          &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+
+
+
+     @&&&@       &&  @&&&&&&&&&&&   &&&&&&&&&&&&  &&            &&&&&&&&&&  &&&     &&&     &&&
+    &&& &&&      &&  @&&       &&&  &&            &&          &&&       &&&@ &&&   &&&&&   &&&
+   &&&   &&&     &&  @&&&&&&&&&&&&  &&&&&&&&&&&   &&          &&         &&&  &&& &&& &&@ &&&
+  &&&&&&&&&&&    &&  @&&&&&&&&&     &&            &&          &&@        &&&   &&@&&   &&@&&
+ &&&       &&&   &&  @&&     &&&@   &&            &&&&&&&&&&&  &&&&&&&&&&&&     &&&&   &&&&
+
+&&&&&&&&&&&&   &&&&&&&&&&&&   &&&&&&&&&&&@  &&&&&&&&&&&&   &&&&&&&&&&&   &&&&&&&&&&&
+&&&       &&&  &&        &&&  &&            &&&                  &&&&    &&
+&&&&&&&&&&&&@  &&&&&&&&&&&&   &&&&&&&&&&&   &&&&&&&&&&&       &&&&       &&&&&&&&&&
+&&&        &&  &&   &&&&      &&            &&&             &&&&         &&
+&&&&&&&&&&&&&  &&     &&&&@   &&&&&&&&&&&@  &&&&&&&&&&&&  @&&&&&&&&&&&   &&&&&&&&&&&
+
+"""
+CHEATSHEET = f"""
+
+                       [bold][bright_blue]Airflow Breeze Cheatsheet[/][/]
+
+    [bright_blue]* Installation[/]
+
+        When you have multiple copies of Airflow, it's better if you use `./Breeze2` from each of those
+        repositories, as it will have the latest version of Breeze2 and its dependencies.
+
+        However, if you only have one Airflow repository and you have `pipx` installed, you can use
+        `pipx` to install the `Breeze2` command in your path (`Breeze2` then runs from this repository)
+
+            pipx install -e ./dev/breeze --force
+
+        In case you use `pipx`, you might need to occasionally reinstall `Breeze2` with the `--force` flag
+        when its dependencies change. You do not have to do that when you use it via `./Breeze2`
+
+    [bright_blue]* Port forwarding:[/]
+
+        Ports are forwarded to the running docker containers for webserver and database
+          * {SSH_PORT} -> forwarded to Airflow ssh server -> airflow:22
+          * {WEBSERVER_HOST_PORT} -> forwarded to Airflow webserver -> airflow:8080
+          * {FLOWER_HOST_PORT} -> forwarded to Flower dashboard -> airflow:5555
+          * {POSTGRES_HOST_PORT} -> forwarded to Postgres database -> postgres:5432
+          * {MYSQL_HOST_PORT} -> forwarded to MySQL database  -> mysql:3306
+          * {MSSQL_HOST_PORT} -> forwarded to MSSQL database  -> mssql:1433
+          * {REDIS_HOST_PORT} -> forwarded to Redis broker -> redis:6379
+
+        Direct links to those services that you can use from the host:
+
+          * ssh connection for remote debugging: ssh -p {SSH_PORT} airflow@127.0.0.1 (password: airflow)
+          * Webserver: http://127.0.0.1:{WEBSERVER_HOST_PORT}
+          * Flower:    http://127.0.0.1:{FLOWER_HOST_PORT}
+          * Postgres:  jdbc:postgresql://127.0.0.1:{POSTGRES_HOST_PORT}/airflow?user=postgres&password=airflow
+          * Mysql:     jdbc:mysql://127.0.0.1:{MYSQL_HOST_PORT}/airflow?user=root
+          * Redis:     redis://127.0.0.1:{REDIS_HOST_PORT}/0
+
+    [bright_blue]* How can I add my stuff in Breeze:[/]
+
+        * Your dags for webserver and scheduler are read from `/files/dags` directory
+          which is mounted from folder in Airflow sources:
+          * `{AIRFLOW_SOURCES_ROOT}/files/dags`
+
+        * You can add an `airflow-breeze-config` directory. Place it in
+          `{AIRFLOW_SOURCES_ROOT}/files/airflow-breeze-config` and:
+            * Add `variables.env` - to make breeze source the variables automatically for you
+            * Add `.tmux.conf` - to add extra initial configuration to `tmux`
+            * Add `init.sh` - this file will be sourced when you enter the container, so you can add
+              any custom code there.
+
+        * You can also put any other files in the
+          `{AIRFLOW_SOURCES_ROOT}/files` folder
+          and they will be visible in the `/files/` folder inside the container
+
+    [bright_blue]* Other options[/]
+
+        Check out `--help` for ./Breeze2 commands. It will show you other options, such as running
+        integrations or starting complete Airflow using the `start-airflow` command, as well as ways
+        of cleaning up the installation.
+
+        Make sure to run `setup-autocomplete` to get the commands and options auto-completable
+        in your shell.
+
+        You can disable this cheatsheet by running:
+
+            ./Breeze2 config --no-cheatsheet
+
+"""
+CHEATSHEET_STYLE = "white"
+ASCIIART_STYLE = "white"
diff --git a/dev/breeze/src/airflow_breeze/visuals/__init__.py b/dev/breeze/src/airflow_breeze/visuals/__init__.py
deleted file mode 100644
index f13a1af5c9..0000000000
--- a/dev/breeze/src/airflow_breeze/visuals/__init__.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from airflow_breeze.global_constants import (
-    FLOWER_HOST_PORT,
-    MSSQL_HOST_PORT,
-    MYSQL_HOST_PORT,
-    POSTGRES_HOST_PORT,
-    REDIS_HOST_PORT,
-    SSH_PORT,
-    WEBSERVER_HOST_PORT,
-)
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
-
-ASCIIART = """
-
-
-
-
-                                  @&&&&&&@
-                                 @&&&&&&&&&&&@
-                                &&&&&&&&&&&&&&&&
-                                        &&&&&&&&&&
-                                            &&&&&&&
-                                             &&&&&&&
-                           @@@@@@@@@@@@@@@@   &&&&&&
-                          @&&&&&&&&&&&&&&&&&&&&&&&&&&
-                         &&&&&&&&&&&&&&&&&&&&&&&&&&&&
-                                         &&&&&&&&&&&&
-                                             &&&&&&&&&
-                                           &&&&&&&&&&&&
-                                      @@&&&&&&&&&&&&&&&@
-                   @&&&&&&&&&&&&&&&&&&&&&&&&&&&&  &&&&&&
-                  &&&&&&&&&&&&&&&&&&&&&&&&&&&&    &&&&&&
-                 &&&&&&&&&&&&&&&&&&&&&&&&         &&&&&&
-                                                 &&&&&&
-                                               &&&&&&&
-                                            @&&&&&&&&
-            @&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
-           &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
-          &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
-
-
-
-     @&&&@       &&  @&&&&&&&&&&&   &&&&&&&&&&&&  &&            &&&&&&&&&&  &&&     &&&     &&&
-    &&& &&&      &&  @&&       &&&  &&            &&          &&&       &&&@ &&&   &&&&&   &&&
-   &&&   &&&     &&  @&&&&&&&&&&&&  &&&&&&&&&&&   &&          &&         &&&  &&& &&& &&@ &&&
-  &&&&&&&&&&&    &&  @&&&&&&&&&     &&            &&          &&@        &&&   &&@&&   &&@&&
- &&&       &&&   &&  @&&     &&&@   &&            &&&&&&&&&&&  &&&&&&&&&&&&     &&&&   &&&&
-
-&&&&&&&&&&&&   &&&&&&&&&&&&   &&&&&&&&&&&@  &&&&&&&&&&&&   &&&&&&&&&&&   &&&&&&&&&&&
-&&&       &&&  &&        &&&  &&            &&&                  &&&&    &&
-&&&&&&&&&&&&@  &&&&&&&&&&&&   &&&&&&&&&&&   &&&&&&&&&&&       &&&&       &&&&&&&&&&
-&&&        &&  &&   &&&&      &&            &&&             &&&&         &&
-&&&&&&&&&&&&&  &&     &&&&@   &&&&&&&&&&&@  &&&&&&&&&&&&  @&&&&&&&&&&&   &&&&&&&&&&&
-
-"""
-
-ASCIIART_STYLE = "white"
-
-
-CHEATSHEET = f"""
-Airflow Breeze CHEATSHEET
-Adding breeze to your path:
-   When you exit the environment, you can add sources of Airflow to the path - you can
-   run breeze or the scripts above from any directory by calling 'breeze' commands directly
-
-   \'{str(get_airflow_sources_root())}\' is exported into PATH
-
-    Port forwarding:
-      Ports are forwarded to the running docker containers for webserver and database
-        * {SSH_PORT} -> forwarded to Airflow ssh server -> airflow:22
-        * {WEBSERVER_HOST_PORT} -> forwarded to Airflow webserver -> airflow:8080
-        * {FLOWER_HOST_PORT} -> forwarded to Flower dashboard -> airflow:5555
-        * {POSTGRES_HOST_PORT} -> forwarded to Postgres database -> postgres:5432
-        * {MYSQL_HOST_PORT} -> forwarded to MySQL database  -> mysql:3306
-        * {MSSQL_HOST_PORT} -> forwarded to MSSQL database  -> mssql:1443
-        * {REDIS_HOST_PORT} -> forwarded to Redis broker -> redis:6379
-      Here are links to those services that you can use on host:"
-        * ssh connection for remote debugging: ssh -p {SSH_PORT} airflow@127.0.0.1 pw: airflow"
-        * Webserver: http://127.0.0.1:{WEBSERVER_HOST_PORT}"
-        * Flower:    http://127.0.0.1:{FLOWER_HOST_PORT}"
-        * Postgres:  jdbc:postgresql://127.0.0.1:{POSTGRES_HOST_PORT}/airflow?user=postgres&password=airflow"
-        * Mysql:     jdbc:mysql://127.0.0.1:{MYSQL_HOST_PORT}/airflow?user=root"
-        * Redis:     redis://127.0.0.1:{REDIS_HOST_PORT}/0"
-
-"""
-
-CHEATSHEET_STYLE = "white"
diff --git a/dev/breeze/src/airflow_ci/find_newer_dependencies.py b/dev/breeze/src/airflow_ci/find_newer_dependencies.py
index 7f6bbdc445..7742ff2910 100644
--- a/dev/breeze/src/airflow_ci/find_newer_dependencies.py
+++ b/dev/breeze/src/airflow_ci/find_newer_dependencies.py
@@ -14,7 +14,20 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+"""
+Finds which newer dependencies were used in a given image build and prints them for better diagnostics.
 
+This addresses a common problem: currently `pip` does not produce "perfect" information about resolution
+errors, and we sometimes need to guess which new dependency caused long backtracking. Once we have a short
+list of candidates, we can (following a manual process) pinpoint the actual culprit.
+
+This small tool is run in CI whenever the image build times out, so that we can more easily guess
+which dependency caused the problem.
+
+The process to follow once you see the backtracking is described in:
+
+https://github.com/apache/airflow/blob/main/dev/TRACKING_BACKTRACKING_ISSUES.md
+"""
 import json
 from datetime import timedelta
 from typing import Any, Dict, List, Tuple
@@ -102,10 +115,10 @@ def main(constraints_branch: str, python: str, timezone: str, updated_on_or_afte
             hour=0, minute=0, second=0, microsecond=0
         )
     console.print(
-        "\n[yellow]Those are possible candidates that broke current "
+        "\n[bright_yellow]Those are possible candidates that broke current "
         "`pip` resolution mechanisms by falling back to long backtracking[/]\n"
     )
-    console.print(f"\n[yellow]We are limiting to packages updated after {min_date} ({timezone})[/]\n")
+    console.print(f"\n[bright_yellow]We are limiting to packages updated after {min_date} ({timezone})[/]\n")
     with Progress(console=console) as progress:
         task = progress.add_task(f"Processing {count_packages} packages.", total=count_packages)
         for package_line in package_lines:
@@ -122,14 +135,16 @@ def main(constraints_branch: str, python: str, timezone: str, updated_on_or_afte
                 constrained_packages[package] = constraints_package_version
             progress.advance(task)
             progress.refresh()
-    console.print("\n[yellow]If you see long running builds with `pip` backtracking, you should follow[/]")
     console.print(
-        "[yellow]https://github.com/apache/airflow/blob/main/dev/TRACKING_BACKTRACKING_ISSUES.md[/]\n"
+        "\n[bright_yellow]If you see long running builds with `pip` backtracking, you should follow[/]"
+    )
+    console.print(
+        "[bright_yellow]https://github.com/apache/airflow/blob/main/dev/TRACKING_BACKTRACKING_ISSUES.md[/]\n"
     )
     constraint_string = ""
     for package, constrained_version in constrained_packages.items():
         constraint_string += f' "{package}=={constrained_version}"'
-    console.print("[yellow]Use the following pip install command (see the doc above for details)\n")
+    console.print("[bright_yellow]Use the following pip install command (see the doc above for details)\n")
     console.print(
         'pip install ".[devel_all]" --upgrade --upgrade-strategy eager '
         '"dill<0.3.3" "certifi<2021.0.0" "google-ads<14.0.1"' + constraint_string,
diff --git a/dev/breeze/src/airflow_ci/freespace.py b/dev/breeze/src/airflow_ci/freespace.py
index bbec8c6d82..cb24ac3a24 100755
--- a/dev/breeze/src/airflow_ci/freespace.py
+++ b/dev/breeze/src/airflow_ci/freespace.py
@@ -17,15 +17,14 @@
 # specific language governing permissions and limitations
 # under the License.
 
-"""freespace.py for clean environment before start CI"""
+"""Cleans up the environment before starting CI."""
 
-import shlex
-import subprocess
-from typing import List
 
 import rich_click as click
 from rich.console import Console
 
+from airflow_breeze.utils.run_utils import run_command
+
 console = Console(force_terminal=True, color_system="standard", width=180)
 
 option_verbose = click.option(
@@ -46,26 +45,14 @@ option_dry_run = click.option(
 @option_verbose
 @option_dry_run
 def main(verbose, dry_run):
-    run_command(["sudo", "swapoff", "-a"], verbose, dry_run)
-    run_command(["sudo", "rm", "-f", "/swapfile"], verbose, dry_run)
-    run_command(["sudo", "apt-get", "clean"], verbose, dry_run, check=False)
-    run_command(["docker", "system", "prune", "--all", "--force", "--volumes"], verbose, dry_run)
-    run_command(["df", "-h"], verbose, dry_run)
-    run_command(["docker", "logout", "ghcr.io"], verbose, dry_run)
-
-
-def run_command(cmd: List[str], verbose, dry_run, *, check: bool = True, **kwargs):
-    if verbose:
-        console.print(f"\n[green]$ {' '.join(shlex.quote(c) for c in cmd)}[/]\n")
-    if dry_run:
-        return
-    try:
-        subprocess.run(cmd, check=check, **kwargs)
-    except subprocess.CalledProcessError as ex:
-        print("========================= OUTPUT start ============================")
-        print(ex.stderr)
-        print(ex.stdout)
-        print("========================= OUTPUT end ============================")
+    run_command(["sudo", "swapoff", "-a"], verbose=verbose, dry_run=dry_run)
+    run_command(["sudo", "rm", "-f", "/swapfile"], verbose=verbose, dry_run=dry_run)
+    run_command(["sudo", "apt-get", "clean"], verbose=verbose, dry_run=dry_run, check=False)
+    run_command(
+        ["docker", "system", "prune", "--all", "--force", "--volumes"], verbose=verbose, dry_run=dry_run
+    )
+    run_command(["df", "-h"], verbose=verbose, dry_run=dry_run)
+    run_command(["docker", "logout", "ghcr.io"], verbose=verbose, dry_run=dry_run)
 
 
 if __name__ == '__main__':
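
The refactored freespace.py delegates to the shared run_command helper instead of carrying
its own subprocess wrapper. Based on the call sites in this diff (the removed inline wrapper
above and the keyword arguments asserted in the tests below), the helper is assumed to look
roughly like this sketch; it is not the actual implementation in airflow_breeze.utils.run_utils:

    import shlex
    import subprocess
    from typing import List, Optional

    def run_command(
        cmd: List[str],
        *,
        verbose: bool = False,
        dry_run: bool = False,
        check: bool = True,
        no_output_dump_on_exception: bool = False,
        **kwargs,
    ) -> Optional[subprocess.CompletedProcess]:
        # Print the command when verbose; skip execution entirely on dry-run.
        if verbose:
            print(f"$ {' '.join(shlex.quote(c) for c in cmd)}")
        if dry_run:
            return None
        try:
            return subprocess.run(cmd, check=check, **kwargs)
        except subprocess.CalledProcessError as ex:
            if not no_output_dump_on_exception:
                print(ex.stdout)
                print(ex.stderr)
            raise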
diff --git a/dev/breeze/tests/test_build_image.py b/dev/breeze/tests/test_build_image.py
index f475657bd4..77c9c7759d 100644
--- a/dev/breeze/tests/test_build_image.py
+++ b/dev/breeze/tests/test_build_image.py
@@ -18,30 +18,30 @@ from unittest.mock import patch
 
 import pytest
 
-from airflow_breeze.ci.build_image import get_image_build_params
+from airflow_breeze.build_image.ci.build_ci_image import get_ci_image_build_params
 
 
 @pytest.mark.parametrize(
     'parameters, expected_build_params, cached_values, written_cache_version',
     [
-        ({}, {"python_version": "3.7"}, {}, False),  # default value no params
-        ({"python_version": "3.8"}, {"python_version": "3.8"}, {}, "3.8"),  # default value override params
-        ({}, {"python_version": "3.8"}, {'PYTHON_MAJOR_MINOR_VERSION': "3.8"}, False),  # value from cache
+        ({}, {"python": "3.7"}, {}, False),  # default value no params
+        ({"python": "3.8"}, {"python": "3.8"}, {}, "3.8"),  # default value override params
+        ({}, {"python": "3.8"}, {'PYTHON_MAJOR_MINOR_VERSION': "3.8"}, False),  # value from cache
         (
-            {"python_version": "3.9"},
-            {"python_version": "3.9"},
+            {"python": "3.9"},
+            {"python": "3.9"},
             {'PYTHON_MAJOR_MINOR_VERSION': "3.8"},
             "3.9",
         ),  # override cache with passed param
     ],
 )
 def test_get_image_params(parameters, expected_build_params, cached_values, written_cache_version):
-    with patch('airflow_breeze.cache.read_from_cache_file') as read_from_cache_mock, patch(
-        'airflow_breeze.cache.check_if_cache_exists'
+    with patch('airflow_breeze.utils.cache.read_from_cache_file') as read_from_cache_mock, patch(
+        'airflow_breeze.utils.cache.check_if_cache_exists'
     ) as check_if_cache_exists_mock, patch(
-        'airflow_breeze.ci.build_image.write_to_cache_file'
+        'airflow_breeze.utils.cache.write_to_cache_file'
     ) as write_to_cache_file_mock, patch(
-        'airflow_breeze.ci.build_image.check_cache_and_write_if_not_cached'
+        'airflow_breeze.utils.cache.check_cached_value_is_allowed'
     ) as check_cache_and_write_mock:
         check_if_cache_exists_mock.return_value = True
         check_cache_and_write_mock.side_effect = lambda cache_key, default_value: (
@@ -49,7 +49,7 @@ def test_get_image_params(parameters, expected_build_params, cached_values, writ
             cached_values[cache_key] if cache_key in cached_values else default_value,
         )
         read_from_cache_mock.side_effect = lambda param_name: cached_values.get(param_name)
-        build_parameters = get_image_build_params(parameters)
+        build_parameters = get_ci_image_build_params(parameters)
         for param, param_value in expected_build_params.items():
             assert getattr(build_parameters, param) == param_value
         if written_cache_version:
diff --git a/dev/breeze/tests/test_cache.py b/dev/breeze/tests/test_cache.py
index 64bd6237cd..2e0931d2d5 100644
--- a/dev/breeze/tests/test_cache.py
+++ b/dev/breeze/tests/test_cache.py
@@ -20,7 +20,7 @@ from unittest import mock
 
 import pytest
 
-from airflow_breeze.cache import (
+from airflow_breeze.utils.cache import (
     check_if_cache_exists,
     check_if_values_allowed,
     delete_cache,
@@ -48,7 +48,7 @@ def test_allowed_values(parameter, value, result, exception):
         assert result == check_if_values_allowed(parameter, value)
 
 
-@mock.patch("airflow_breeze.cache.Path")
+@mock.patch("airflow_breeze.utils.cache.Path")
 def test_check_if_cache_exists(path):
     check_if_cache_exists("test_param")
     path.assert_called_once_with(AIRFLOW_SOURCES / ".build")
@@ -72,8 +72,8 @@ def test_read_from_cache_file(param):
             assert param_value in param_list
 
 
-@mock.patch('airflow_breeze.cache.Path')
-@mock.patch('airflow_breeze.cache.check_if_cache_exists')
+@mock.patch('airflow_breeze.utils.cache.Path')
+@mock.patch('airflow_breeze.utils.cache.check_if_cache_exists')
 def test_delete_cache_exists(mock_check_if_cache_exists, mock_path):
     param = "MYSQL_VERSION"
     mock_check_if_cache_exists.return_value = True
@@ -82,8 +82,8 @@ def test_delete_cache_exists(mock_check_if_cache_exists, mock_path):
     assert cache_deleted
 
 
-@mock.patch('airflow_breeze.cache.Path')
-@mock.patch('airflow_breeze.cache.check_if_cache_exists')
+@mock.patch('airflow_breeze.utils.cache.Path')
+@mock.patch('airflow_breeze.utils.cache.check_if_cache_exists')
 def test_delete_cache_not_exists(mock_check_if_cache_exists, mock_path):
     param = "TEST_PARAM"
     mock_check_if_cache_exists.return_value = False
diff --git a/dev/breeze/tests/test_commands.py b/dev/breeze/tests/test_commands.py
index 9d29251ea6..6116752699 100644
--- a/dev/breeze/tests/test_commands.py
+++ b/dev/breeze/tests/test_commands.py
@@ -14,10 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from airflow_breeze.global_constants import MOUNT_ALL, MOUNT_NONE, MOUNT_SELECTED
 from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags
-from airflow_breeze.utils.path_utils import get_airflow_sources_root
-from airflow_breeze.visuals import ASCIIART
+from airflow_breeze.utils.visuals import ASCIIART
 
 
 def test_visuals():
@@ -25,13 +24,6 @@ def test_visuals():
 
 
 def test_get_extra_docker_flags():
-    airflow_sources = get_airflow_sources_root()
-    all = True
-    selected = False
-    assert len(get_extra_docker_flags(all, selected, str(airflow_sources))) < 10
-    all = False
-    selected = True
-    assert len(get_extra_docker_flags(all, selected, str(airflow_sources))) > 60
-    all = False
-    selected = False
-    assert len(get_extra_docker_flags(all, selected, str(airflow_sources))) < 8
+    assert len(get_extra_docker_flags(MOUNT_ALL)) < 10
+    assert len(get_extra_docker_flags(MOUNT_SELECTED)) > 60
+    assert len(get_extra_docker_flags(MOUNT_NONE)) < 8
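
The two boolean parameters (all/selected) are replaced by a single mount-mode constant, as
the updated assertions above show. A hedged usage sketch follows; the constant names are the
ones imported by the test, while the description of the returned flags is an assumption:

    from airflow_breeze.global_constants import MOUNT_SELECTED
    from airflow_breeze.utils.docker_command_utils import get_extra_docker_flags

    # Assumed to return "-v source:target" style flags for docker run;
    # MOUNT_SELECTED mounts the selected Airflow source folders one by one,
    # hence the much larger flag count asserted in the test.
    flags = get_extra_docker_flags(MOUNT_SELECTED)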
diff --git a/dev/breeze/tests/test_docker_command_utils.py b/dev/breeze/tests/test_docker_command_utils.py
index 53df745501..598e760fd5 100644
--- a/dev/breeze/tests/test_docker_command_utils.py
+++ b/dev/breeze/tests/test_docker_command_utils.py
@@ -26,11 +26,18 @@ from airflow_breeze.utils.docker_command_utils import check_docker_compose_versi
 def test_check_docker_version_unknown(mock_console, mock_run_command):
     check_docker_version(verbose=True)
     expected_run_command_calls = [
-        call(['docker', 'info'], verbose=True, suppress_console_print=True, capture_output=True, text=True),
+        call(
+            ['docker', 'info'],
+            verbose=True,
+            check=True,
+            no_output_dump_on_exception=True,
+            capture_output=True,
+            text=True,
+        ),
         call(
             ['docker', 'version', '--format', '{{.Client.Version}}'],
             verbose=True,
-            suppress_console_print=True,
+            no_output_dump_on_exception=True,
             capture_output=True,
             text=True,
         ),
@@ -55,7 +62,7 @@ def test_check_docker_version_too_low(mock_console, mock_run_command, mock_check
     mock_run_command.assert_called_with(
         ['docker', 'version', '--format', '{{.Client.Version}}'],
         verbose=True,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
@@ -76,7 +83,7 @@ def test_check_docker_version_ok(mock_console, mock_run_command, mock_check_dock
     mock_run_command.assert_called_with(
         ['docker', 'version', '--format', '{{.Client.Version}}'],
         verbose=True,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
@@ -95,7 +102,7 @@ def test_check_docker_version_higher(mock_console, mock_run_command, mock_check_
     mock_run_command.assert_called_with(
         ['docker', 'version', '--format', '{{.Client.Version}}'],
         verbose=True,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
@@ -110,7 +117,7 @@ def test_check_docker_compose_version_unknown(mock_console, mock_run_command):
         call(
             ["docker-compose", "--version"],
             verbose=True,
-            suppress_console_print=True,
+            no_output_dump_on_exception=True,
             capture_output=True,
             text=True,
         ),
@@ -131,7 +138,7 @@ def test_check_docker_compose_version_low(mock_console, mock_run_command):
     mock_run_command.assert_called_with(
         ["docker-compose", "--version"],
         verbose=True,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
@@ -157,7 +164,7 @@ def test_check_docker_compose_version_ok(mock_console, mock_run_command):
     mock_run_command.assert_called_with(
         ["docker-compose", "--version"],
         verbose=True,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
@@ -173,7 +180,7 @@ def test_check_docker_compose_version_higher(mock_console, mock_run_command):
     mock_run_command.assert_called_with(
         ["docker-compose", "--version"],
         verbose=True,
-        suppress_console_print=True,
+        no_output_dump_on_exception=True,
         capture_output=True,
         text=True,
     )
diff --git a/dev/breeze/tests/test_find_airflow_directory.py b/dev/breeze/tests/test_find_airflow_directory.py
index 90ac439895..894c545875 100644
--- a/dev/breeze/tests/test_find_airflow_directory.py
+++ b/dev/breeze/tests/test_find_airflow_directory.py
@@ -19,7 +19,7 @@ import os
 from pathlib import Path
 from unittest import mock
 
-from airflow_breeze.utils.path_utils import find_airflow_sources_root, get_airflow_sources_root
+from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, find_airflow_sources_root
 
 ACTUAL_AIRFLOW_SOURCES = Path(__file__).parent.parent.parent.parent
 ROOT_PATH = Path(Path(__file__).root)
@@ -28,7 +28,7 @@ ROOT_PATH = Path(Path(__file__).root)
 def test_find_airflow_root_upwards_from_cwd(capsys):
     os.chdir(Path(__file__).parent)
     find_airflow_sources_root()
-    assert ACTUAL_AIRFLOW_SOURCES == get_airflow_sources_root()
+    assert ACTUAL_AIRFLOW_SOURCES == AIRFLOW_SOURCES_ROOT
     output = str(capsys.readouterr().out)
     assert output == ''
 
@@ -36,16 +36,16 @@ def test_find_airflow_root_upwards_from_cwd(capsys):
 def test_find_airflow_root_upwards_from_file(capsys):
     os.chdir(Path(__file__).root)
     find_airflow_sources_root()
-    assert ACTUAL_AIRFLOW_SOURCES == get_airflow_sources_root()
+    assert ACTUAL_AIRFLOW_SOURCES == AIRFLOW_SOURCES_ROOT
     output = str(capsys.readouterr().out)
     assert output == ''
 
 
-@mock.patch('airflow_breeze.utils.path_utils.__AIRFLOW_SOURCES_ROOT', ROOT_PATH)
-@mock.patch('airflow_breeze.utils.path_utils.__AIRFLOW_CFG_FILE', "bad_name.cfg")
-def test_fallback_find_airflow_root(capsys):
-    os.chdir(ROOT_PATH)
-    find_airflow_sources_root()
-    assert ROOT_PATH == get_airflow_sources_root()
+@mock.patch('airflow_breeze.utils.path_utils.AIRFLOW_CFG_FILE', "bad_name.cfg")
+@mock.patch('airflow_breeze.utils.path_utils.Path.cwd')
+def test_fallback_find_airflow_root(mock_cwd, capsys):
+    mock_cwd.return_value = ROOT_PATH
+    sources = find_airflow_sources_root()
+    assert sources == ROOT_PATH
     output = str(capsys.readouterr().out)
     assert "Could not find Airflow sources" in output
diff --git a/dev/breeze/tests/test_prod_image.py b/dev/breeze/tests/test_prod_image.py
index 09507709f4..1c4b406901 100644
--- a/dev/breeze/tests/test_prod_image.py
+++ b/dev/breeze/tests/test_prod_image.py
@@ -19,69 +19,76 @@ from unittest.mock import patch
 
 import pytest
 
-from airflow_breeze.prod.build_prod_image import get_image_build_params
+from airflow_breeze.build_image.prod.build_prod_image import get_prod_image_build_params
 
-default_params = {
-    'build_cache_local': False,
-    'build_cache_pulled': False,
-    'build_cache_disabled': False,
-    'skip_rebuild_check': False,
+default_params: Dict[str, Union[str, bool]] = {
+    'docker_cache': "pulled",
     'disable_mysql_client_installation': False,
     'disable_mssql_client_installation': False,
     'disable_postgres_client_installation': False,
     'install_docker_context_files': False,
-    'disable_pypi_when_building': False,
-    'disable_pip_cache': False,
-    'upgrade_to_newer_dependencies': False,
-    'skip_installing_airflow_providers_from_sources': False,
+    'disable_airflow_repo_cache': False,
+    'upgrade_to_newer_dependencies': "false",
+    'install_providers_from_sources': False,
     'cleanup_docker_context_files': False,
     'prepare_buildx_cache': False,
 }
 
-params_python8 = {**default_params, "python_version": "3.8"}  # type: Dict[str, Union[str, bool]]
+params_python8 = {**default_params, "python": "3.8"}  # type: Dict[str, Union[str, bool]]
 
-params_python9 = {**default_params, "python_version": "3.9"}  # type: Dict[str, Union[str, bool]]
+params_python9 = {**default_params, "python": "3.9"}  # type: Dict[str, Union[str, bool]]
 
 
 @pytest.mark.parametrize(
-    'parameters, expected_build_params, cached_values, written_cache_version',
+    'description, parameters, expected_build_params, cached_values, written_cache_version, check_if_allowed',
     [
-        (default_params, {"python_version": "3.7"}, {}, False),  # default value no params
-        (params_python8, {"python_version": "3.8"}, {}, "3.8"),  # default value override params
+        ("default value no cache", default_params, {"python": "3.7"}, {}, "3.7", False),
+        ("passed value different no cache", params_python8, {"python": "3.8"}, {}, "3.8", True),
         (
-            default_params,
-            {"python_version": "3.8"},
+            "passed value same as cache",
+            params_python8,
+            {"python": "3.8"},
             {'PYTHON_MAJOR_MINOR_VERSION': "3.8"},
-            False,
-        ),  # value from cache
+            "3.8",
+            True,
+        ),
         (
+            "passed value different than cache",
             params_python9,
-            {"python_version": "3.9"},
+            {"python": "3.9"},
             {'PYTHON_MAJOR_MINOR_VERSION': "3.8"},
             "3.9",
-        ),  # override cache with passed param
+            True,
+        ),
     ],
 )
-def test_get_image_params(parameters, expected_build_params, cached_values, written_cache_version):
-    with patch('airflow_breeze.cache.read_from_cache_file') as read_from_cache_mock, patch(
-        'airflow_breeze.cache.check_if_cache_exists'
+def test_get_image_params(
+    description, parameters, expected_build_params, cached_values, written_cache_version, check_if_allowed
+):
+    with patch('airflow_breeze.utils.cache.read_from_cache_file') as read_from_cache_mock, patch(
+        'airflow_breeze.utils.cache.check_if_cache_exists'
     ) as check_if_cache_exists_mock, patch(
-        'airflow_breeze.prod.build_prod_image.write_to_cache_file'
+        'airflow_breeze.utils.cache.write_to_cache_file'
     ) as write_to_cache_file_mock, patch(
-        'airflow_breeze.prod.build_prod_image.check_cache_and_write_if_not_cached'
-    ) as check_cache_and_write_mock:
+        'airflow_breeze.utils.cache.read_from_cache_file'
+    ) as read_from_cache_file:
         check_if_cache_exists_mock.return_value = True
-        check_cache_and_write_mock.side_effect = lambda cache_key, default_value: (
+        read_from_cache_file.side_effect = lambda cache_key: (
             cache_key in cached_values,
-            cached_values[cache_key] if cache_key in cached_values else default_value,
+            cached_values[cache_key] if cache_key in cached_values else None,
         )
         read_from_cache_mock.side_effect = lambda param_name: cached_values.get(param_name)
-        build_parameters = get_image_build_params(parameters)
+        build_parameters = get_prod_image_build_params(parameters)
         for param, param_value in expected_build_params.items():
             assert getattr(build_parameters, param) == param_value
         if written_cache_version:
-            write_to_cache_file_mock.assert_called_once_with(
-                "PYTHON_MAJOR_MINOR_VERSION", written_cache_version, check_allowed_values=True
-            )
+            if check_if_allowed:
+                write_to_cache_file_mock.assert_called_once_with(
+                    "PYTHON_MAJOR_MINOR_VERSION", written_cache_version, check_allowed_values=True
+                )
+            else:
+                write_to_cache_file_mock.assert_called_once_with(
+                    "PYTHON_MAJOR_MINOR_VERSION", written_cache_version
+                )
         else:
             write_to_cache_file_mock.assert_not_called()
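
The reworked mocks above imply a cache API of roughly the following shape. This is a sketch
derived from the patched call sites only (the write signature from assert_called_once_with,
the read behavior from the side_effect lambdas); defaults are guesses, not the actual module:

    from typing import Optional

    def read_from_cache_file(param_name: str) -> Optional[str]:
        """Return the cached value for param_name, or None when nothing is cached."""
        ...

    def write_to_cache_file(param_name: str, value: str, check_allowed_values: bool = True) -> None:
        """Persist value under param_name, optionally validating it against allowed values."""
        ...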
diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py
index 765f6d8560..6d4cf34611 100755
--- a/dev/retag_docker_images.py
+++ b/dev/retag_docker_images.py
@@ -52,13 +52,13 @@ def pull_push_all_images(
     target_branch: str,
     target_repo: str,
 ):
-    for python_version in PYTHON_VERSIONS:
+    for python in PYTHON_VERSIONS:
         for image in images:
             source_image = image.format(
-                prefix=source_prefix, branch=source_branch, repo=source_repo, python_version=python_version
+                prefix=source_prefix, branch=source_branch, repo=source_repo, python=python
             )
             target_image = image.format(
-                prefix=target_prefix, branch=target_branch, repo=target_repo, python_version=python_version
+                prefix=target_prefix, branch=target_branch, repo=target_repo, python=python
             )
             print(f"Copying image: {source_image} -> {target_image}")
             subprocess.run(["docker", "pull", source_image], check=True)
diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env
index b9271c063a..6e94776dfd 100644
--- a/scripts/ci/docker-compose/_docker.env
+++ b/scripts/ci/docker-compose/_docker.env
@@ -15,7 +15,6 @@
 # specific language governing permissions and limitations
 # under the License.
 AIRFLOW_CI_IMAGE
-AIRFLOW_EXTRAS
 BACKEND
 BREEZE
 CI
diff --git a/scripts/ci/docker-compose/base.yml b/scripts/ci/docker-compose/base.yml
index 3833e8807a..22e2e51bf0 100644
--- a/scripts/ci/docker-compose/base.yml
+++ b/scripts/ci/docker-compose/base.yml
@@ -29,7 +29,6 @@ services:
       # We need all those env variables here because docker-compose-v2 does not really work well
       # With env files and there are many problems with it:
       - AIRFLOW_CI_IMAGE=${AIRFLOW_CI_IMAGE}
-      - AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}
       - BACKEND=${BACKEND}
       - BREEZE=${BREEZE}
       - CI=${CI}
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 4c43360869..20a22c9207 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -69,11 +69,6 @@ function build_images::add_build_args_for_remote_install() {
         "--build-arg" "AIRFLOW_SOURCES_FROM=Dockerfile"
         "--build-arg" "AIRFLOW_SOURCES_TO=/Dockerfile"
     )
-    if [[ ${CI} == "true" ]]; then
-        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "PIP_PROGRESS_BAR=off"
-        )
-    fi
     if [[ -n "${AIRFLOW_CONSTRAINTS_REFERENCE}" ]]; then
         EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
             "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE}"
@@ -241,12 +236,10 @@ function build_images::confirm_image_rebuild() {
         echo  "${COLOR_RED}ERROR: The ${THE_IMAGE_TYPE} needs to be rebuilt - it is outdated.   ${COLOR_RESET}"
         echo """
 
-   Make sure you build the image by running:
+   ${COLOR_YELLOW}Make sure you build the image by running:${COLOR_RESET}
 
       ./breeze --python ${PYTHON_MAJOR_MINOR_VERSION} build-image
 
-   If you run it via pre-commit as individual hook, you can run 'pre-commit run build'.
-
 """
         exit 1
     else
@@ -489,11 +482,6 @@ function build_images::build_ci_image() {
         )
     fi
     local extra_docker_ci_flags=()
-    if [[ ${CI} == "true" ]]; then
-        EXTRA_DOCKER_PROD_BUILD_FLAGS+=(
-            "--build-arg" "PIP_PROGRESS_BAR=off"
-        )
-    fi
     if [[ -n "${AIRFLOW_CONSTRAINTS_LOCATION}" ]]; then
         extra_docker_ci_flags+=(
             "--build-arg" "AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}"
diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh
index 1cc8660465..8b053d9e9c 100755
--- a/scripts/docker/entrypoint_ci.sh
+++ b/scripts/docker/entrypoint_ci.sh
@@ -55,10 +55,11 @@ export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}
 if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
 
     echo
-    echo "Airflow home: ${AIRFLOW_HOME}"
-    echo "Airflow sources: ${AIRFLOW_SOURCES}"
-    echo "Airflow core SQL connection: ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
-
+    echo "${COLOR_BLUE}Running Initialization. Your basic configuration is:${COLOR_RESET}"
+    echo
+    echo "  * ${COLOR_BLUE}Airflow home:${COLOR_RESET} ${AIRFLOW_HOME}"
+    echo "  * ${COLOR_BLUE}Airflow sources:${COLOR_RESET} ${AIRFLOW_SOURCES}"
+    echo "  * ${COLOR_BLUE}Airflow core SQL connection:${COLOR_RESET} ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}"
     echo
 
     RUN_TESTS=${RUN_TESTS:="false"}
@@ -68,7 +69,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
     if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then
         export PYTHONPATH=${AIRFLOW_SOURCES}
         echo
-        echo "Using already installed airflow version"
+        echo "${COLOR_BLUE}Using airflow version from current sources${COLOR_RESET}"
         echo
         if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then
             pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null
@@ -82,38 +83,38 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
         mkdir -p "${AIRFLOW_SOURCES}"/tmp/
     elif [[ ${USE_AIRFLOW_VERSION} == "none"  ]]; then
         echo
-        echo "Skip installing airflow - only install wheel/tar.gz packages that are present locally"
+        echo "${COLOR_BLUE}Skip installing airflow - only install wheel/tar.gz packages that are present locally.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
     elif [[ ${USE_AIRFLOW_VERSION} == "wheel"  ]]; then
         echo
-        echo "Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
+        echo "${COLOR_BLUE}Install airflow from wheel package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
         install_airflow_from_wheel "[${AIRFLOW_EXTRAS}]"
         uninstall_providers
     elif [[ ${USE_AIRFLOW_VERSION} == "sdist"  ]]; then
         echo
-        echo "Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers."
+        echo "${COLOR_BLUE}Install airflow from sdist package with [${AIRFLOW_EXTRAS}] extras but uninstalling providers.${COLOR_RESET}"
         echo
         uninstall_airflow_and_providers
         install_airflow_from_sdist "[${AIRFLOW_EXTRAS}]"
         uninstall_providers
     else
         echo
-        echo "Install airflow from PyPI without extras"
+        echo "${COLOR_BLUE}Install airflow from PyPI without extras"
         echo
         install_released_airflow_version "${USE_AIRFLOW_VERSION}"
     fi
     if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then
         echo
-        echo "Install all packages from dist folder"
+        echo "${COLOR_BLUE}Install all packages from dist folder"
         if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then
             echo "(except apache-airflow)"
         fi
         if [[ ${PACKAGE_FORMAT} == "both" ]]; then
             echo
-            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'${COLOR_RESET}"
+            echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'.${COLOR_RESET}"
             echo
             exit 1
         fi
@@ -197,7 +198,7 @@ if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then
 
     cd "${AIRFLOW_SOURCES}"
 
-    if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
+    if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then
         export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS}
         export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES}
         # shellcheck source=scripts/in_container/bin/run_tmux
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 82b26090a4..26d1f45200 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -54,7 +54,7 @@ function assert_in_container() {
 }
 
 function in_container_script_start() {
-    if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then
+    if [[ ${VERBOSE_COMMANDS:="false"} == "true" || ${VERBOSE_COMMANDS} == "True" ]]; then
         set -x
     fi
 }
@@ -63,14 +63,14 @@ function in_container_script_end() {
     #shellcheck disable=2181
     EXIT_CODE=$?
     if [[ ${EXIT_CODE} != 0 ]]; then
-        if [[ "${PRINT_INFO_FROM_SCRIPTS="true"}" == "true" ]]; then
+        if [[ "${PRINT_INFO_FROM_SCRIPTS="true"}" == "true" || "${PRINT_INFO_FROM_SCRIPTS}" == "True" ]]; then
             echo "########################################################################################################################"
             echo "${COLOR_BLUE} [IN CONTAINER]   EXITING ${0} WITH EXIT CODE ${EXIT_CODE}  ${COLOR_RESET}"
             echo "########################################################################################################################"
         fi
     fi
 
-    if [[ ${VERBOSE_COMMANDS} == "true" ]]; then
+    if [[ ${VERBOSE_COMMANDS:="false"} == "true" || ${VERBOSE_COMMANDS} == "True" ]]; then
         set +x
     fi
 }
@@ -322,7 +322,7 @@ function setup_provider_packages() {
     export PACKAGE_PREFIX_UPPERCASE=""
     export PACKAGE_PREFIX_LOWERCASE=""
     export PACKAGE_PREFIX_HYPHEN=""
-    if [[ ${VERBOSE} == "true" ]]; then
+    if [[ ${VERBOSE:="false"} == "true" ||  ${VERBOSE} == "True" ]]; then
         OPTIONAL_VERBOSE_FLAG+=("--verbose")
     fi
     readonly PACKAGE_TYPE
@@ -429,7 +429,7 @@ function get_providers_to_act_on() {
 
 # Starts group for GitHub Actions - makes logs much more readable
 function group_start {
-    if [[ ${GITHUB_ACTIONS=} == "true" ]]; then
+    if [[ ${GITHUB_ACTIONS:="false"} == "true" || ${GITHUB_ACTIONS} == "True" ]]; then
         echo "::group::${1}"
     else
         echo
@@ -440,7 +440,7 @@ function group_start {
 
 # Ends group for GitHub Actions
 function group_end {
-    if [[ ${GITHUB_ACTIONS=} == "true" ]]; then
+    if [[ ${GITHUB_ACTIONS:="false"} == "true" || ${GITHUB_ACTIONS} == "True" ]]; then
         echo -e "\033[0m"  # Disable any colors set in the group
         echo "::endgroup::"
     fi
diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh
index d525fb106c..2ed2a42610 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -84,7 +84,7 @@ function check_integration {
 
     local env_var_name
     env_var_name=INTEGRATION_${integration_name^^}
-    if [[ ${!env_var_name:=} != "true" ]]; then
+    if [[ ${!env_var_name:=} != "true" && ${!env_var_name} != "True" ]]; then
         if [[ ! ${DISABLED_INTEGRATIONS} == *" ${integration_name}"* ]]; then
             DISABLED_INTEGRATIONS="${DISABLED_INTEGRATIONS} ${integration_name}"
         fi
@@ -112,7 +112,7 @@ function check_db_backend {
 }
 
 function resetdb_if_requested() {
-    if [[ ${DB_RESET:="false"} == "true" ]]; then
+    if [[ ${DB_RESET:="false"} == "true" || ${DB_RESET} == "True" ]]; then
         echo
         echo "Resetting the DB"
         echo
@@ -125,7 +125,7 @@ function resetdb_if_requested() {
 }
 
 function startairflow_if_requested() {
-    if [[ ${START_AIRFLOW:="false"} == "true" ]]; then
+    if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then
         echo
         echo "Starting Airflow"
         echo
@@ -143,13 +143,14 @@ function startairflow_if_requested() {
     return $?
 }
 
-echo "==============================================================================================="
-echo "             Checking integrations and backends"
-echo "==============================================================================================="
+echo
+echo "${COLOR_BLUE}Checking integrations and backends.${COLOR_RESET}"
+echo
+
 if [[ -n ${BACKEND=} ]]; then
     check_db_backend 50
-    echo "-----------------------------------------------------------------------------------------------"
 fi
+echo
 check_integration "Kerberos" "kerberos" "run_nc kdc-server-example-com 88" 50
 check_integration "MongoDB" "mongo" "run_nc mongo 27017" 50
 check_integration "Redis" "redis" "run_nc redis 6379" 50
@@ -168,8 +169,6 @@ CMD="curl --max-time 1 -X GET 'http://pinot:8000/health' -H 'accept: text/plain'
 check_integration "Pinot (Broker API)" "pinot" "${CMD}" 50
 check_integration "RabbitMQ" "rabbitmq" "run_nc rabbitmq 5672" 50
 
-echo "-----------------------------------------------------------------------------------------------"
-
 if [[ ${EXIT_CODE} != 0 ]]; then
     echo
     echo "Error: some of the CI environment failed to initialize!"
@@ -182,10 +181,8 @@ fi
 resetdb_if_requested
 startairflow_if_requested
 
-if [[ -n ${DISABLED_INTEGRATIONS=} ]]; then
-    echo
-    echo "Disabled integrations:${DISABLED_INTEGRATIONS}"
+if [[ -n ${DISABLED_INTEGRATIONS=} && (${VERBOSE=} == "true" || ${VERBOSE} == "True") ]]; then
     echo
-    echo "Enable them via --integration <INTEGRATION_NAME> flags (you can use 'all' for all)"
+    echo "${COLOR_BLUE}Those integrations are disabled: ${DISABLED_INTEGRATIONS}"
     echo
 fi
diff --git a/scripts/in_container/configure_environment.sh b/scripts/in_container/configure_environment.sh
index cfd0c04f49..d9638b3219 100644
--- a/scripts/in_container/configure_environment.sh
+++ b/scripts/in_container/configure_environment.sh
@@ -25,12 +25,8 @@ if [[ -d "${FILES_DIR}" ]]; then
     export AIRFLOW__CORE__DAGS_FOLDER="/files/dags"
     mkdir -pv "${AIRFLOW__CORE__DAGS_FOLDER}"
     sudo chown "${HOST_USER_ID}":"${HOST_GROUP_ID}" "${AIRFLOW__CORE__DAGS_FOLDER}"
-    echo "Your dags for webserver and scheduler are read from ${AIRFLOW__CORE__DAGS_FOLDER} directory"
-    echo "which is mounted from your <AIRFLOW_SOURCES>/files/dags folder"
-    echo
 else
     export AIRFLOW__CORE__DAGS_FOLDER="${AIRFLOW_HOME}/dags"
-    echo "Your dags for webserver and scheduler are read from ${AIRFLOW__CORE__DAGS_FOLDER} directory"
 fi
 
 
@@ -38,16 +34,11 @@ if [[ -d "${AIRFLOW_BREEZE_CONFIG_DIR}" && \
     -f "${AIRFLOW_BREEZE_CONFIG_DIR}/${VARIABLES_ENV_FILE}" ]]; then
     pushd "${AIRFLOW_BREEZE_CONFIG_DIR}" >/dev/null 2>&1 || exit 1
     echo
-    echo "Sourcing environment variables from ${VARIABLES_ENV_FILE} in ${AIRFLOW_BREEZE_CONFIG_DIR}"
+    echo "${COLOR_BLUE}Sourcing environment variables from ${VARIABLES_ENV_FILE} in ${AIRFLOW_BREEZE_CONFIG_DIR}${COLOR_RESET}"
     echo
      # shellcheck disable=1090
     source "${VARIABLES_ENV_FILE}"
     popd >/dev/null 2>&1 || exit 1
-else
-    echo
-    echo "You can add ${AIRFLOW_BREEZE_CONFIG_DIR} directory and place ${VARIABLES_ENV_FILE}"
-    echo "In it to make breeze source the variables automatically for you"
-    echo
 fi
 
 
@@ -55,14 +46,9 @@ if [[ -d "${AIRFLOW_BREEZE_CONFIG_DIR}" && \
     -f "${AIRFLOW_BREEZE_CONFIG_DIR}/${TMUX_CONF_FILE}" ]]; then
     pushd "${AIRFLOW_BREEZE_CONFIG_DIR}" >/dev/null 2>&1 || exit 1
     echo
-    echo "Using ${TMUX_CONF_FILE} from ${AIRFLOW_BREEZE_CONFIG_DIR}"
+    echo "${COLOR_BLUE}Using ${TMUX_CONF_FILE} from ${AIRFLOW_BREEZE_CONFIG_DIR}${COLOR_RESET}"
     echo
      # shellcheck disable=1090
     ln -sf "${AIRFLOW_BREEZE_CONFIG_DIR}/${TMUX_CONF_FILE}" ~
     popd >/dev/null 2>&1 || exit 1
-else
-    echo
-    echo "You can add ${AIRFLOW_BREEZE_CONFIG_DIR} directory and place ${TMUX_CONF_FILE}"
-    echo "in it to make breeze use your local ${TMUX_CONF_FILE} for tmux"
-    echo
 fi
diff --git a/scripts/in_container/run_ci_tests.sh b/scripts/in_container/run_ci_tests.sh
index 52a2242eba..efab241fb7 100755
--- a/scripts/in_container/run_ci_tests.sh
+++ b/scripts/in_container/run_ci_tests.sh
@@ -39,7 +39,7 @@ if [[ ${RES} == "139" ]]; then
 fi
 
 set +x
-if [[ "${RES}" == "0" && ${CI:="false"} == "true" ]]; then
+if [[ "${RES}" == "0" && ( ${CI:="false"} == "true" || ${CI} == "True" ) ]]; then
     echo "All tests successful"
     cp .coverage /files
 fi
@@ -61,7 +61,7 @@ if [[ ${TEST_TYPE:=} == "Quarantined" ]]; then
     fi
 fi
 
-if [[ ${CI:=} == "true" ]]; then
+if [[ ${CI:="false"} == "true" || ${CI} == "True" ]]; then
     if [[ ${RES} != "0" ]]; then
         echo
         echo "Dumping logs on error"
diff --git a/scripts/in_container/run_docs_build.sh b/scripts/in_container/run_docs_build.sh
index bcf94b9c80..0331557dc1 100755
--- a/scripts/in_container/run_docs_build.sh
+++ b/scripts/in_container/run_docs_build.sh
@@ -20,7 +20,7 @@
 
 sudo -E "${AIRFLOW_SOURCES}/docs/build_docs.py" "${@}"
 
-if [[ ${CI:="false"} == "true" && -d "${AIRFLOW_SOURCES}/docs/_build/docs/" ]]; then
+if [[ ( ${CI:="false"} == "true" || ${CI} == "True" ) && -d "${AIRFLOW_SOURCES}/docs/_build/docs/" ]]; then
     rm -rf "/files/documentation"
     cp -r "${AIRFLOW_SOURCES}/docs/_build" "/files/documentation"
 fi
diff --git a/scripts/in_container/run_init_script.sh b/scripts/in_container/run_init_script.sh
index 0bd685cd9a..48f7716f7c 100755
--- a/scripts/in_container/run_init_script.sh
+++ b/scripts/in_container/run_init_script.sh
@@ -37,9 +37,4 @@ if [[ -d "${AIRFLOW_BREEZE_CONFIG_DIR}" && \
          # shellcheck disable=1090
         source "${INIT_SCRIPT_FILE}"
         popd >/dev/null 2>&1 || exit 1
-else
-    echo
-    echo "You can add ${AIRFLOW_BREEZE_CONFIG_DIR} directory and place ${INIT_SCRIPT_FILE}"
-    echo "In it to make breeze source an initialization script automatically for you"
-    echo
 fi
diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh
index 2addc616ec..a5c6904beb 100755
--- a/scripts/in_container/run_install_and_test_provider_packages.sh
+++ b/scripts/in_container/run_install_and_test_provider_packages.sh
@@ -280,7 +280,7 @@ setup_provider_packages
 verify_parameters
 install_airflow_as_specified
 
-if [[ ${SKIP_TWINE_CHECK=""} != "true" ]]; then
+if [[ ${SKIP_TWINE_CHECK=""} != "true" && ${SKIP_TWINE_CHECK=""} != "True" ]]; then
     # Airflow 2.1.0 installs importlib_metadata version that does not work well with twine
     # So we should skip twine check in this case
     twine_check_provider_packages
diff --git a/scripts/in_container/run_prepare_provider_documentation.sh b/scripts/in_container/run_prepare_provider_documentation.sh
index 68e06dfe96..a8745ff16e 100755
--- a/scripts/in_container/run_prepare_provider_documentation.sh
+++ b/scripts/in_container/run_prepare_provider_documentation.sh
@@ -111,7 +111,7 @@ function run_prepare_documentation() {
         echo "${COLOR_RED}There were errors when preparing documentation. Exiting! ${COLOR_RESET}"
         exit 1
     else
-        if [[ ${GENERATE_PROVIDERS_ISSUE=} == "true" ]]; then
+        if [[ ${GENERATE_PROVIDERS_ISSUE=} == "true" || ${GENERATE_PROVIDERS_ISSUE} == "True" ]]; then
             echo
             python3 dev/provider_packages/prepare_provider_packages.py generate-issue-content "${prepared_documentation[@]}"
             echo
@@ -157,7 +157,7 @@ if [[ $# != "0" && ${1} =~ ^[0-9][0-9][0-9][0-9]\.[0-9][0-9]\.[0-9][0-9]$ ]]; th
 fi
 
 OPTIONAL_NON_INTERACTIVE_FLAG=()
-if [[ ${NON_INTERACTIVE=} == "true" ]]; then
+if [[ ${NON_INTERACTIVE=} == "true" || ${NON_INTERACTIVE} == "True" ]]; then
     OPTIONAL_NON_INTERACTIVE_FLAG+=("--non-interactive")
 fi
 
diff --git a/scripts/in_container/run_system_tests.sh b/scripts/in_container/run_system_tests.sh
index 2b1181c025..f77a58f6ff 100755
--- a/scripts/in_container/run_system_tests.sh
+++ b/scripts/in_container/run_system_tests.sh
@@ -44,11 +44,11 @@ pytest "${PYTEST_ARGS[@]}"
 RES=$?
 
 set +x
-if [[ "${RES}" == "0" && ${GITHUB_ACTIONS} == "true" ]]; then
+if [[ "${RES}" == "0" && ( ${GITHUB_ACTIONS=} == "true" || ${GITHUB_ACTIONS} == "True" ) ]]; then
     echo "All tests successful"
 fi
 
-if [[ ${GITHUB_ACTIONS} == "true" ]]; then
+if [[ ${GITHUB_ACTIONS=} == "true" || ${GITHUB_ACTIONS} == "True" ]]; then
     dump_airflow_logs
 fi