Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2020/02/17 16:29:17 UTC

[GitHub] [airflow] potiuk commented on a change in pull request #7433: [AIRFLOW-6820] split breeze into functions

potiuk commented on a change in pull request #7433: [AIRFLOW-6820] split breeze into functions
URL: https://github.com/apache/airflow/pull/7433#discussion_r380276920
 
 

 ##########
 File path: breeze
 ##########
 @@ -16,146 +16,245 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-# Bash sanity settings (error on exit, complain for undefined vars, error when pipe fails)
 set -euo pipefail
 
-export BREEZE=true
-
 MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 
-export AIRFLOW_SOURCES="${MY_DIR}"
-
-# Directory where all CI scripts are located
-export SCRIPTS_CI_DIR="${MY_DIR}/scripts/ci"
-
-BUILD_CACHE_DIR="${MY_DIR}/.build"
-FILES_DIR="${MY_DIR}/files"
-TMP_DIR="${MY_DIR}/tmp"
-
-mkdir -pv "${BUILD_CACHE_DIR}"
-mkdir -pv "${TMP_DIR}"
-mkdir -pv "${FILES_DIR}"
-
-# Note - we do not use __script_init.sh here because it can only be used from within
-# the CI directory and we need to override PYTHON_VERSION based on what we store
-# in the .build directory
+# Bash arrays need to be defined outside of functions unfortunately :(
+# Array with extra options for Docker compose
+declare -a EXTRA_DC_OPTIONS
+# Array with selected integrations
+declare -a INTEGRATIONS
+# This is where remaining args are passed
+declare -a REMAINING_ARGS
 
-# Beginning of the initialisation here
+function setup_default_breeze_variables() {
+    # Whether to actually run docker compose with the command set given
+    export COMMAND_TO_RUN="enter_breeze"
+    export BREEZE=true
 
-# shellcheck source=scripts/ci/_utils.sh
-. "${SCRIPTS_CI_DIR}/_utils.sh"
+    export AIRFLOW_SOURCES="${MY_DIR}"
 
-initialize_breeze_environment
-
-export PYTHON_VERSION="${PYTHON_VERSION:=$(read_from_file PYTHON_VERSION)}"
-
-basic_sanity_checks
+    # Directory where all CI scripts are located
+    export SCRIPTS_CI_DIR="${MY_DIR}/scripts/ci"
 
-script_start
+    export BUILD_CACHE_DIR="${MY_DIR}/.build"
+    export FILES_DIR="${MY_DIR}/files"
+    export TMP_DIR="${MY_DIR}/tmp"
 
-trap script_end EXIT
+    mkdir -pv "${BUILD_CACHE_DIR}"
+    mkdir -pv "${TMP_DIR}"
+    mkdir -pv "${FILES_DIR}"
 
-# End of initialisation here
+    # Note - we do not use __script_init.sh here because it can only be used from within
+    # the CI directory and we need to override PYTHON_VERSION based on what we store
+    # in the .build directory
 
+    # Beginning of the initialisation here
 
-# Sets width of the screen
-SEPARATOR_WIDTH="$(tput cols)"
+    # shellcheck source=scripts/ci/_utils.sh
+    . "${SCRIPTS_CI_DIR}/_utils.sh"
 
-# Name of the script
-CMDNAME="$(basename -- "$0")"
+    export PYTHON_VERSION="${PYTHON_VERSION:=$(read_from_file PYTHON_VERSION)}"
 
-# Update short and long options in the breeze-complete script
-# This way autocomplete will work automatically with all options
-# shellcheck source=breeze-complete
-. "${MY_DIR}/breeze-complete"
+    # Sets width of the screen
+    SEPARATOR_WIDTH="$(tput cols)"
 
-# Whether to actually run docker compose with the command set given
-ENTER_ENVIRONMENT="true"
+    # Name of the script
+    CMDNAME="$(basename -- "$0")"
 
+    # Update short and long options in the breeze-complete script
+    # This way autocomplete will work automatically with all options
+    # shellcheck source=breeze-complete
+    . "${MY_DIR}/breeze-complete"
 
-# Whether to cleanup local image
-CLEANUP_IMAGES="false"
+    # Skips mounting local Airflow sources
+    export SKIP_MOUNTING_LOCAL_SOURCES="false"
 
-# Skips mounting local Airflow sources
-SKIP_MOUNTING_LOCAL_SOURCES="false"
+    # Holds chosen command if the -x flag is used.
+    export RUN_COMMAND=""
 
-# If set, we initialize local virtualenv and install all dependencies
-INITIALIZE_LOCAL_VIRTUALENV=false
+    # Holds the test target if the -t flag is used.
+    export TEST_TARGET=""
 
-# If set, we setup autocomplete for breeze
-SETUP_AUTOCOMPLETE=false
+    # Holds docker compose command if the -d flag is used.
+    export DOCKER_COMPOSE_COMMAND=""
 
-# Holds chosen command if the -x flag is used.
-RUN_COMMAND=""
+    # If true, the docker images are rebuilt locally.
+    export NEEDS_DOCKER_BUILD="false"
 
-# Holds the test target if the -t flag is used.
-TEST_TARGET=""
+    # By default we only pull images if we do not have them locally.
+    # This can be overridden by -p flag
+    export FORCE_PULL_IMAGES="false"
 
-# Holds docker compose command if the -d flag is used.
-DOCKER_COMPOSE_COMMAND=""
+    # Runtime is empty initially (might be set to kubernetes in case kubernetes is chosen)
+    export RUNTIME=""
 
-#extra options for Docker compose
-EXTRA_DC_OPTIONS=()
+    # Do not enable Kind Kubernetes cluster by default
+    export ENABLE_KIND_CLUSTER="false"
 
-# If true, the docker images are rebuilt locally.
-export AIRFLOW_CONTAINER_DOCKER_BUILD_NEEDED="false"
+    # Do not recreate Kubernetes cluster by default
+    export RECREATE_KIND_CLUSTER="false"
 
-# By default we only pull images if we do not have them locally.
-# This can be overridden by -p flag
-export AIRFLOW_CONTAINER_FORCE_PULL_IMAGES="false"
+    # Do not stop Kubernetes cluster by default
+    export STOP_KIND_CLUSTER="false"
 
-# Start airflow-testing image with all the dependencies
-export AIRFLOW_CONTAINER_DEPS="true"
+    # We use docker image caches by default to speed up the builds
+    export USE_PULLED_IMAGES_AS_CACHE=${USE_PULLED_IMAGES_AS_CACHE:="true"}
 
-# Runtime is empty initially (might be set to kubernetes in case kubernetes is chosen)
-export RUNTIME=""
+    # By default we do not push images. This can be overridden by -u flag.
+    export PUSH_IMAGES=${PUSH_IMAGES:="false"}
 
-# Do not enable Kind Kubernetes cluster by default
-export ENABLE_KIND_CLUSTER="false"
+    # Determine version of the Airflow from version.py
+    AIRFLOW_VERSION=$(cat airflow/version.py - << EOF | python
+print(version.replace("+",""))
+EOF
+    )
+    export AIRFLOW_VERSION
 
-# Do not recreate Kubernetes cluster by default
-export RECREATE_KIND_CLUSTER="false"
+    # Verbosity in running ci scripts
+    export VERBOSE="false"
 
-# Do not stop Kubernetes cluster by default
-export STOP_KIND_CLUSTER="false"
+    # Whether to force build without checking if it is needed
+    export FORCE_BUILD_IMAGES=${FORCE_BUILD_IMAGES:="false"}
 
-# We use docker image caches by default to speed up the builds
-export AIRFLOW_CONTAINER_USE_PULLED_IMAGES_CACHE=${AIRFLOW_CONTAINER_USE_PULLED_IMAGES_CACHE:="true"}
+    # Files determining whether asciiart/cheatsheet are suppressed
+    SUPPRESS_CHEATSHEET_FILE="${MY_DIR}/.suppress_cheatsheet"
+    SUPPRESS_ASCIIART_FILE="${MY_DIR}/.suppress_asciiart"
 
-# By default we do not push images. This can be overridden by -u flag.
-export AIRFLOW_CONTAINER_PUSH_IMAGES=${AIRFLOW_CONTAINER_PUSH_IMAGES:="false"}
+    # Default values for flags
 
-# Skip building full CI image locally
-export AIRFLOW_CONTAINER_SKIP_CI_IMAGE="false"
+    _BREEZE_DEFAULT_BACKEND="sqlite"
+    _BREEZE_DEFAULT_KUBERNETES_MODE="git_mode"
+    _BREEZE_DEFAULT_KUBERNETES_VERSION="v1.15.3"
+}
+# End of initialisation here
 
-# Branch name of the base image used (usually master or v1-10-test or v1-10-stable)
-export AIRFLOW_CONTAINER_BRANCH_NAME=${AIRFLOW_CONTAINER_BRANCH_NAME:=${DEFAULT_BRANCH}}
+function initialize_virtualenv() {
+   # Check if we are in virtualenv
+   set +e
+   echo -e "import sys\nif not hasattr(sys,'base_prefix'):\n  sys.exit(1)" | "python${PYTHON_VERSION}"
 
 Review comment:
   Ah yeah. Will take a close look at this. I tested it on Linux and made some fixes to the initialize_virtualenv function, but I will test it now with anaconda/pyenv/venv and any other *env I can get my hands on. I will do it in another PR though :)
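
   For reference, a rough sketch of a check that should also cover plain venv, virtualenv
   and conda environments (illustration only, not the change made in this PR; the
   in_virtualenv name is just an example, and it assumes a python3 interpreter on PATH
   and, for conda, the CONDA_PREFIX variable that conda exports on activation):

       # Return 0 when we appear to be running inside some kind of virtual environment.
       function in_virtualenv() {
           # venv/virtualenv: sys.prefix differs from sys.base_prefix (Python 3),
           # or sys.real_prefix is set (older virtualenv releases).
           if python3 -c 'import sys; sys.exit(0 if sys.prefix != getattr(sys, "base_prefix", sys.prefix) or hasattr(sys, "real_prefix") else 1)'; then
               return 0
           fi
           # conda environments are full installations, so sys.base_prefix does not
           # change there; conda instead exports CONDA_PREFIX on activation.
           if [[ -n "${CONDA_PREFIX:-}" ]]; then
               return 0
           fi
           return 1
       }

   It would be called as "if in_virtualenv; then ... fi", which also keeps it safe under
   the script's set -euo pipefail settings.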
