Posted to commits@airflow.apache.org by po...@apache.org on 2021/06/25 14:11:11 UTC

[airflow] branch main updated: Add Python 3.9 support (#15515)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new ce44b62  Add Python 3.9 support (#15515)
ce44b62 is described below

commit ce44b628904e4f7480a2c208b5d5e087526408b6
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Fri Jun 25 16:10:55 2021 +0200

    Add Python 3.9 support (#15515)
    
    This includes several things:
    
    * added per-provider support for Python versions. Each provider
      can now declare the Python versions it does not support
    * excluded the ldap core extra from Python 3.9
    * skipped the relevant tests on Python 3.9
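
The per-provider mechanism is small: a provider lists the Python versions it cannot support in its provider.yaml, and the package-preparation script filters those versions out of the supported list and folds them into the python_requires specifier. A minimal sketch of that logic, mirroring what the diff below adds to prepare_provider_packages.py (the provider_info dict here stands in for a parsed provider.yaml):

    # Sketch only - the real implementation lives in
    # dev/provider_packages/prepare_provider_packages.py (see the diff below).
    from typing import Dict, List, Tuple

    ALL_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]

    def python_support(provider_info: Dict) -> Tuple[List[str], str]:
        excluded = provider_info.get("excluded-python-versions") or []
        supported = [v for v in ALL_PYTHON_VERSIONS if v not in excluded]
        python_requires = "~=3.6"
        for version in excluded:
            python_requires += f", !={version}"
        return supported, python_requires

    # The apache.hive provider excludes 3.9 (see its provider.yaml below):
    print(python_support({"excluded-python-versions": ["3.9"]}))
    # (['3.6', '3.7', '3.8'], '~=3.6, !=3.9')
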
---
 BREEZE.rst                                         | 14 +++++-----
 CI.rst                                             |  4 +--
 CONTRIBUTING.rst                                   |  2 +-
 CONTRIBUTORS_QUICK_START.rst                       |  2 +-
 Dockerfile                                         |  7 +++--
 Dockerfile.ci                                      |  8 +++---
 IMAGES.rst                                         |  2 +-
 LOCAL_VIRTUALENV.rst                               |  6 ++---
 PULL_REQUEST_WORKFLOW.rst                          |  2 +-
 README.md                                          |  4 +--
 airflow/__init__.py                                |  3 ++-
 airflow/provider.yaml.schema.json                  |  7 +++++
 airflow/providers/apache/hive/provider.yaml        |  3 +++
 breeze                                             |  4 +--
 breeze-complete                                    |  2 +-
 .../PROVIDER_README_TEMPLATE.rst.jinja2            |  2 ++
 dev/provider_packages/SETUP_TEMPLATE.py.jinja2     |  8 +++---
 dev/provider_packages/prepare_provider_packages.py | 12 +++++++++
 dev/retag_docker_images.py                         |  2 +-
 scripts/ci/libraries/_build_images.sh              |  3 +--
 scripts/ci/libraries/_initialization.sh            |  4 +--
 scripts/ci/libraries/_push_pull_remove_images.sh   |  4 +--
 scripts/ci/selective_ci_checks.sh                  |  6 ++---
 scripts/ci/tools/ci_fix_ownership.sh               |  2 +-
 scripts/ci/tools/prepare_prod_docker_images.sh     |  2 +-
 setup.cfg                                          |  3 ++-
 tests/bats/breeze/test_breeze_complete.bats        |  8 +++---
 tests/plugins/test_plugins_manager.py              | 28 ++++++++++++++-----
 tests/providers/apache/hive/hooks/test_hive.py     | 31 ++++++++++++++++++++++
 .../apache/hive/transfers/test_hive_to_mysql.py    |  9 +++++++
 .../apache/hive/transfers/test_hive_to_samba.py    |  9 +++++++
 .../apache/hive/transfers/test_mssql_to_hive.py    | 14 +++++++---
 .../apache/hive/transfers/test_mysql_to_hive.py    |  7 +++++
 .../log/elasticmock/fake_elasticsearch.py          |  2 +-
 tests/sensors/test_base.py                         |  1 +
 tests/sensors/test_smart_sensor_operator.py        |  2 ++
 36 files changed, 167 insertions(+), 62 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index d2e786f..fea90fa 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1264,7 +1264,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
           Uses different version of Airflow when building PROD image.
@@ -1494,7 +1494,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -I, --production-image
           Use production image for entering the environment and builds (not for tests).
@@ -1561,7 +1561,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
@@ -1684,7 +1684,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
 
   ####################################################################################################
@@ -1879,7 +1879,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -b, --backend BACKEND
           Backend to use for tests - it determines which database is used.
@@ -1943,7 +1943,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -F, --force-build-images
           Forces building of the local docker images. The images are rebuilt
@@ -2351,7 +2351,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   ****************************************************************************************************
    Choose backend to run for Airflow
diff --git a/CI.rst b/CI.rst
index 6cfe746..af5e8aa 100644
--- a/CI.rst
+++ b/CI.rst
@@ -57,7 +57,7 @@ Container Registry used as cache
 For our CI builds we are using Container Registry to store results of the "Build Image" workflow
 and pass it to the "CI Build" workflow.
 
-Currently in main version of Airflow we run tests in 3 different versions of Python (3.6, 3.7, 3.8)
+Currently, in the main version of Airflow, we run tests in 4 different versions of Python (3.6, 3.7, 3.8, 3.9),
 which means that we have to build 8 images (4 CI ones and 4 PROD ones). Yet we run around 12 jobs
 with each of the CI images. That is a lot of time to just build the environment to run. Therefore
 we are utilising the ``pull_request_target`` feature of GitHub Actions.
@@ -779,7 +779,7 @@ The image names follow the patterns:
 +--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
 
 * <BRANCH> might be either "main" or "v1-10-test" or "v2-*-test"
-* <X.Y> - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8"].
+* <X.Y> - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8", "3.9"].
 * <COMMIT_SHA> - for images that get merged to "main", "v2-*-test" or "v1-10-test", or built as part of a
   pull request the images are tagged with the (full length) commit SHA of that particular branch. For pull
   requests the SHA used is the tip of the pull request branch.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index d89c943..f269170 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -873,7 +873,7 @@ This can be done by running this (it utilizes parallel preparation of the constr
 
 .. code-block:: bash
 
-    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8"
+    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8 3.9"
     for python_version in $(echo "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}")
     do
       ./breeze build-image --upgrade-to-newer-dependencies --python ${python_version} --build-cache-local
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index 4c50af6..132112e 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -135,7 +135,7 @@ Pyenv and setting up virtual-env
       libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \
       xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
 
-  $ sudo apt install build-essentials python3.6-dev python3.7-dev python3.8-dev python-dev openssl \
+  $ sudo apt install build-essentials python3.6-dev python3.7-dev python3.8-dev python3.9-dev python-dev openssl \
        sqlite sqlite-dev default-libmysqlclient-dev libmysqld-dev postgresql
 
 2. Install pyenv
diff --git a/Dockerfile b/Dockerfile
index 39a13dc..6a4b75d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -44,7 +44,7 @@ ARG AIRFLOW_GID="50000"
 
 ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
 
-ARG AIRFLOW_PIP_VERSION=21.1.1
+ARG AIRFLOW_PIP_VERSION=21.1.2
 
 # By default PIP has progress bar but you can disable it.
 ARG PIP_PROGRESS_BAR="on"
@@ -232,11 +232,10 @@ ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
 ARG INSTALL_FROM_PYPI="true"
 # Those are additional constraints that are needed for some extras but we do not want to
 # Force them on the main Airflow package.
-# * chardet<4 and certifi<2021.0.0 required to keep snowflake happy
-# * urllib3 - required to keep boto3 happy
+# * certifi<2021.0.0 required to keep snowflake happy
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
 ARG CONTINUE_ON_PIP_CHECK_FAILURE="false"
 
 
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 3b6297a..552afd8 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -219,7 +219,7 @@ ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 # By default in the image, we are installing all providers when installing from sources
 ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
 ARG INSTALL_FROM_PYPI="true"
-ARG AIRFLOW_PIP_VERSION=21.1.1
+ARG AIRFLOW_PIP_VERSION=21.1.2
 # Setup PIP
 # By default PIP install run without cache to make image smaller
 ARG PIP_NO_CACHE_DIR="true"
@@ -263,13 +263,11 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\
 
 # Those are additional constraints that are needed for some extras but we do not want to
 # force them on the main Airflow package. Those limitations are:
-# * chardet<4 and certifi<2021.0.0: required by snowflake provider
+# * certifi<2021.0.0: required by snowflake provider
 # * lazy-object-proxy<1.5.0: required by astroid
-# * pyOpenSSL: required by snowflake provider https://github.com/snowflakedb/snowflake-connector-python/blob/v2.3.6/setup.py#L201
-# * urllib3<1.26: Required to keep boto3 happy
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 lazy-object-proxy<1.5.0 pyOpenSSL<20.0.0 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="lazy-object-proxy<1.5.0 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
 ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
 ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \
     UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
diff --git a/IMAGES.rst b/IMAGES.rst
index e38257f..c3a1805 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -81,7 +81,7 @@ where:
   built from branches so they change over time. The ``2.*.*`` labels are built from git tags
   and they are "fixed" once built.
 * ``PYTHON_MAJOR_MINOR_VERSION`` - version of Python used to build the image. Examples: ``3.6``, ``3.7``,
-  ``3.8``
+  ``3.8``, ``3.9``
 * The ``-ci`` suffix is added for CI images
 * The ``-manifest`` is added for manifest images (see below for explanation of manifest images)
 
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index f97f89a..b389442 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -51,8 +51,8 @@ Required Software Packages
 Use system-level package managers like yum, apt-get for Linux, or
 Homebrew for macOS to install required software packages:
 
-* Python (One of: 3.6, 3.7, 3.8)
-* MySQL
+* Python (One of: 3.6, 3.7, 3.8, 3.9)
+* MySQL 5.7+
 * libxml
 
 Refer to the `Dockerfile.ci <Dockerfile.ci>`__ for a comprehensive list
@@ -102,7 +102,7 @@ Creating a Local virtualenv
 
 To use your IDE for Airflow development and testing, you need to configure a virtual
 environment. Ideally you should set up virtualenv for all Python versions that Airflow
-supports (3.6, 3.7, 3.8).
+supports (3.6, 3.7, 3.8, 3.9).
 
 To create and initialize the local virtualenv:
 
diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst
index 96cc5b3..3e3cce6 100644
--- a/PULL_REQUEST_WORKFLOW.rst
+++ b/PULL_REQUEST_WORKFLOW.rst
@@ -58,7 +58,7 @@ We approached the problem by:
 3) Even more optimisation came from limiting the scope of tests to only "default" matrix parameters. So far
    in Airflow we always run all tests for all matrix combinations. The primary matrix components are:
 
-   * Python versions (currently 3.6, 3.7, 3.8)
+   * Python versions (currently 3.6, 3.7, 3.8, 3.9)
    * Backend types (currently MySQL/Postgres)
    * Backend versions (currently MySQL 5.7, MySQL 8, Postgres 9.6, Postgres 13)
 
diff --git a/README.md b/README.md
index ec7eebd..5510f33 100644
--- a/README.md
+++ b/README.md
@@ -127,9 +127,9 @@ We **highly** recommend upgrading to the latest Airflow major release at the ear
 
 Apache Airflow is tested with:
 
-|                      | Main version (dev)        | Stable version (2.0.2)   |
+|                      | Main version (dev)        | Stable version (2.1.0)   |
 | -------------------- | ------------------------- | ------------------------ |
-| Python               | 3.6, 3.7, 3.8             | 3.6, 3.7, 3.8            |
+| Python               | 3.6, 3.7, 3.8, 3.9        | 3.6, 3.7, 3.8            |
 | Kubernetes           | 1.20, 1.19, 1.18          | 1.20, 1.19, 1.18         |
 | PostgreSQL           | 9.6, 10, 11, 12, 13       | 9.6, 10, 11, 12, 13      |
 | MySQL                | 5.7, 8                    | 5.7, 8                   |
diff --git a/airflow/__init__.py b/airflow/__init__.py
index 7ecc487..6d04886 100644
--- a/airflow/__init__.py
+++ b/airflow/__init__.py
@@ -36,7 +36,7 @@ from airflow import version
 
 __version__ = version.version
 
-__all__ = ['__version__', 'login', 'DAG']
+__all__ = ['__version__', 'login', 'DAG', 'PY36', 'PY37', 'PY38', 'PY39']
 
 # Make `airflow` a namespace package, supporting installing
 # airflow.providers.* in different locations (i.e. one in site, and one in user
@@ -50,6 +50,7 @@ login: Optional[Callable] = None
 PY36 = sys.version_info >= (3, 6)
 PY37 = sys.version_info >= (3, 7)
 PY38 = sys.version_info >= (3, 8)
+PY39 = sys.version_info >= (3, 9)
 
 
 def __getattr__(name):
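
The new PY39 flag follows the same pattern as the existing PY36, PY37 and PY38 constants, and it is what the Hive-related test modules further down use to skip themselves on Python 3.9. A minimal, hypothetical usage sketch (the test class is illustrative, not taken from the diff):

    import unittest

    import pytest

    from airflow import PY39

    @pytest.mark.skipif(PY39, reason="not supported on Python 3.9 yet")
    class TestNeedsOlderPython(unittest.TestCase):  # hypothetical example class
        def test_placeholder(self):
            assert True
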
diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json
index b8a2c13..6891206 100644
--- a/airflow/provider.yaml.schema.json
+++ b/airflow/provider.yaml.schema.json
@@ -28,6 +28,13 @@
         "type": "string"
       }
     },
+    "excluded-python-versions": {
+      "description": "List of python versions excluded for that provider",
+      "type": "array",
+      "items": {
+          "type": "string"
+      }
+    },
     "integrations": {
       "description": "List of integrations supported by the provider.",
       "type": "array",
diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml
index 803b6ec..9d02184 100644
--- a/airflow/providers/apache/hive/provider.yaml
+++ b/airflow/providers/apache/hive/provider.yaml
@@ -31,6 +31,9 @@ versions:
 additional-dependencies:
   - apache-airflow>=2.1.0
 
+excluded-python-versions:
+  - "3.9"
+
 integrations:
   - integration-name: Apache Hive
     external-doc-url: https://hive.apache.org/
diff --git a/breeze b/breeze
index dcd2fb7..7b0087b 100755
--- a/breeze
+++ b/breeze
@@ -3544,7 +3544,7 @@ function breeze::run_breeze_command() {
         docker_engine_resources::check_all_resources
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
-            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"
+            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"  || true
         else
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
         fi
@@ -3658,7 +3658,7 @@ function breeze::run_breeze_command() {
 # We have different versions of images depending on the python version used. We keep up with the
 # Latest patch-level changes in Python (this is done automatically during CI builds) so we have
 # To only take into account MAJOR and MINOR version of python. This variable keeps the major/minor
-# version of python in X.Y format (3.6, 3.7, 3.8 etc).
+# version of python in X.Y format (3.6, 3.7, 3.8, 3.9).
 #
 # In Breeze the precedence of setting the version is as follows:
 #      1. --python flag (if set, it will explicitly override it in the next step)
diff --git a/breeze-complete b/breeze-complete
index 57a69ff..26d2204 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -23,7 +23,7 @@
 # by the BATS tests automatically during pre-commit and CI
 # Those cannot be made read-only as the breeze-complete must be re-sourceable
 
-_breeze_allowed_python_major_minor_versions="3.6 3.7 3.8"
+_breeze_allowed_python_major_minor_versions="3.6 3.7 3.8 3.9"
 _breeze_allowed_backends="sqlite mysql postgres mssql"
 _breeze_allowed_integrations="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino all"
 _breeze_allowed_generate_constraints_modes="source-providers pypi-providers no-providers"
diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
index c9837b1..cb65758 100644
--- a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
+++ b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
@@ -47,6 +47,8 @@ Installation
 You can install this package on top of an existing airflow 2.1+ installation via
 ``pip install {{PACKAGE_PIP_NAME}}``
 
+The package supports the following python versions: {{ ",".join(SUPPORTED_PYTHON_VERSIONS) }}
+
 {%- if PIP_REQUIREMENTS %}
 
 PIP requirements
diff --git a/dev/provider_packages/SETUP_TEMPLATE.py.jinja2 b/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
index cfe82e2..69dcdac 100644
--- a/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
+++ b/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
@@ -68,16 +68,16 @@ def do_setup():
             'Intended Audience :: Developers',
             'Intended Audience :: System Administrators',
             'License :: OSI Approved :: Apache Software License',
-            'Programming Language :: Python :: 3.6',
-            'Programming Language :: Python :: 3.7',
-            'Programming Language :: Python :: 3.8',
+{%- for python_version in SUPPORTED_PYTHON_VERSIONS %}
+            'Programming Language :: Python :: {{ python_version }}',
+{%- endfor %}
             'Topic :: System :: Monitoring',
         ],
         author='Apache Software Foundation',
         author_email='dev@airflow.apache.org',
         url='https://airflow.apache.org/',
         download_url='https://archive.apache.org/dist/airflow/{{ PROVIDERS_FOLDER }}',
-        python_requires='~=3.6',
+        python_requires='{{ PYTHON_REQUIRES }}',
         project_urls={
             'Documentation': 'https://airflow.apache.org/docs/{{ PACKAGE_PIP_NAME }}/{{RELEASE}}/',
             'Bug Tracker': 'https://github.com/apache/airflow/issues',
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index c27c88b..49dc393 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -51,6 +51,8 @@ from rich.console import Console
 from rich.progress import Progress
 from rich.syntax import Syntax
 
+ALL_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+
 try:
     from yaml import CSafeLoader as SafeLoader
 except ImportError:
@@ -208,6 +210,7 @@ class ProviderPackageDetails(NamedTuple):
     documentation_provider_package_path: str
     provider_description: str
     versions: List[str]
+    excluded_python_versions: List[str]
 
 
 ENTITY_NAMES = {
@@ -1445,6 +1448,7 @@ def get_provider_details(provider_package_id: str) -> ProviderPackageDetails:
         documentation_provider_package_path=get_documentation_package_path(provider_package_id),
         provider_description=provider_info['description'],
         versions=provider_info['versions'],
+        excluded_python_versions=provider_info.get("excluded-python-versions") or [],
     )
 
 
@@ -1483,6 +1487,12 @@ def get_provider_jinja_context(
     )
     with open(changelog_path) as changelog_file:
         changelog = changelog_file.read()
+    supported_python_versions = [
+        p for p in ALL_PYTHON_VERSIONS if p not in provider_details.excluded_python_versions
+    ]
+    python_requires = "~=3.6"
+    for p in provider_details.excluded_python_versions:
+        python_requires += f", !={p}"
     context: Dict[str, Any] = {
         "ENTITY_TYPES": list(EntityType),
         "README_FILE": "README.rst",
@@ -1517,6 +1527,8 @@ def get_provider_jinja_context(
             provider_details.documentation_provider_package_path,
         ),
         "CHANGELOG": changelog,
+        "SUPPORTED_PYTHON_VERSIONS": supported_python_versions,
+        "PYTHON_REQUIRES": python_requires,
     }
     return context
 
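Taken together with the SETUP_TEMPLATE.py.jinja2 change above, the apache.hive provider (which excludes 3.9) should end up with generated setup metadata along these lines. This is a sketch of the expected rendering for that one provider, not output captured from the tool:

    # Approximate values rendered into the hive provider's generated setup.py:
    classifiers = [
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ]
    python_requires = '~=3.6, !=3.9'
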
diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py
index 2e83475..91e8dc4 100755
--- a/dev/retag_docker_images.py
+++ b/dev/retag_docker_images.py
@@ -47,7 +47,7 @@ from typing import List
 
 import click
 
-PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
 
 DOCKERHUB_IMAGES = [
     "{prefix}:python{python_version}-{branch}",
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 099cf85..598d944 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -495,7 +495,6 @@ function build_images::rebuild_ci_image_if_needed() {
         push_pull_remove_images::pull_ci_images_if_needed
         return
     fi
-
     local needs_docker_build="false"
     md5sum::check_if_docker_build_is_needed
     build_images::get_local_build_cache_hash
@@ -526,7 +525,7 @@ function build_images::rebuild_ci_image_if_needed() {
                 local root_files_count
                 root_files_count=$(find "airflow" "tests" -user root | wc -l | xargs)
                 if [[ ${root_files_count} != "0" ]]; then
-                    ./scripts/ci/tools/ci_fix_ownership.sh
+                    ./scripts/ci/tools/ci_fix_ownership.sh || true
                 fi
             fi
             verbosity::print_info
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index 87fd5a3..6fd5d21 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -103,11 +103,11 @@ function initialization::initialize_base_variables() {
     export PRODUCTION_IMAGE="false"
 
     # All supported major/minor versions of python in all versions of Airflow
-    ALL_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8")
+    ALL_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8" "3.9")
     export ALL_PYTHON_MAJOR_MINOR_VERSIONS
 
     # Currently supported major/minor versions of python
-    CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8")
+    CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8" "3.9")
     export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS
 
     # Currently supported versions of Postgres
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index d7bc17a..a6a1c5c 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -144,7 +144,7 @@ function push_pull_remove_images::pull_base_python_image() {
         push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PYTHON_BASE_IMAGE}" \
             "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${python_tag_suffix}"
     else
-        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}"
+        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}" || true
     fi
 }
 
@@ -161,7 +161,7 @@ function push_pull_remove_images::pull_ci_images_if_needed() {
             push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" \
                 "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
         else
-            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}"
+            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}" || true
         fi
     fi
 }
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index 65b3624..b0ab297 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -126,9 +126,9 @@ function output_all_basic_variables() {
 
     if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then
         initialization::ga_output postgres-exclude '[{ "python-version": "3.6" }]'
-        initialization::ga_output mysql-exclude '[{ "python-version": "3.7" }]'
-        initialization::ga_output mssql-exclude '[{ "python-version": "3.7" }]'
-        initialization::ga_output sqlite-exclude '[{ "python-version": "3.8" }]'
+        initialization::ga_output mysql-exclude '[{ "python-version": "3.7" }, { "python-version": "3.9" }]'
+        initialization::ga_output mssql-exclude '[{ "python-version": "3.6" }, { "python-version": "3.8" }]'
+        initialization::ga_output sqlite-exclude '[{ "python-version": "3.7" }, { "python-version": "3.8" }]'
     else
         initialization::ga_output postgres-exclude '[]'
         initialization::ga_output mysql-exclude '[]'
diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/ci_fix_ownership.sh
index 56463d2..6ed1161 100755
--- a/scripts/ci/tools/ci_fix_ownership.sh
+++ b/scripts/ci/tools/ci_fix_ownership.sh
@@ -37,4 +37,4 @@ docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
     --rm \
     --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
     "${AIRFLOW_CI_IMAGE}" \
-    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh
+    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true
diff --git a/scripts/ci/tools/prepare_prod_docker_images.sh b/scripts/ci/tools/prepare_prod_docker_images.sh
index bd3436f..ad6cc95 100755
--- a/scripts/ci/tools/prepare_prod_docker_images.sh
+++ b/scripts/ci/tools/prepare_prod_docker_images.sh
@@ -38,7 +38,7 @@ fi
 
 export INSTALL_AIRFLOW_VERSION="${1}"
 
-for python_version in "3.6" "3.7" "3.8"
+for python_version in "3.6" "3.7" "3.8" "3.9"
 do
   export PYTHON_MAJOR_MINOR_VERSION=${python_version}
   "${AIRFLOW_SOURCES_DIR}/scripts/ci/images/ci_build_dockerhub.sh"
diff --git a/setup.cfg b/setup.cfg
index 4af2462..fbe58cb 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -56,6 +56,7 @@ classifiers =
     Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
     Programming Language :: Python :: 3.8
+    Programming Language :: Python :: 3.9
     Topic :: System :: Monitoring
 project_urls =
     Documentation=https://airflow.apache.org/docs/
@@ -106,7 +107,7 @@ install_requires =
     graphviz>=0.12
     gunicorn>=19.5.0
     httpx
-    importlib_metadata~=1.7;python_version<"3.9" # We could work with 3.1, but argparse needs <2
+    importlib_metadata>=1.7;python_version<"3.9"
     importlib_resources~=1.4
     # Required by vendored-in connexion
     inflection>=0.3.1
diff --git a/tests/bats/breeze/test_breeze_complete.bats b/tests/bats/breeze/test_breeze_complete.bats
index 249a493..787fc5b 100644
--- a/tests/bats/breeze/test_breeze_complete.bats
+++ b/tests/bats/breeze/test_breeze_complete.bats
@@ -25,7 +25,7 @@
   source "${AIRFLOW_SOURCES}/breeze-complete"
 
   breeze_complete::get_known_values_breeze "-p"
-  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8"
+  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test get_known_values long" {
@@ -34,7 +34,7 @@
   source "${AIRFLOW_SOURCES}/breeze-complete"
 
   breeze_complete::get_known_values_breeze "--python"
-  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8"
+  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test wrong get_known_values" {
@@ -125,7 +125,7 @@
   COMP_WORDS=("--python" "")
   breeze_complete::_comp_breeze
 
-  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8"
+  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test autocomplete --python with prefix" {
@@ -136,7 +136,7 @@
   COMP_WORDS=("--python" "3")
   breeze_complete::_comp_breeze
 
-  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8"
+  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test autocomplete build-" {
diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py
index 778cf08..23640a8 100644
--- a/tests/plugins/test_plugins_manager.py
+++ b/tests/plugins/test_plugins_manager.py
@@ -30,8 +30,24 @@ from airflow.www import app as application
 from tests.test_utils.config import conf_vars
 from tests.test_utils.mock_plugins import mock_plugin_manager
 
-py39 = sys.version_info >= (3, 9)
-importlib_metadata = 'importlib.metadata' if py39 else 'importlib_metadata'
+importlib_metadata_string = 'importlib_metadata'
+
+try:
+    import importlib_metadata  # pylint: disable=unused-import
+
+    # If importlib_metadata is installed, it takes precedence over built-in importlib.metadata in PY39
+    # so we should use the default declared above
+except ImportError:
+    try:
+        import importlib.metadata  # pylint: disable=unused-import
+
+        # only when we do not have importlib_metadata, the importlib.metadata is actually used
+        importlib_metadata = 'importlib.metadata'
+    except ImportError:
+        raise Exception(
+            "Either importlib_metadata must be installed or importlib.metadata must be"
+            " available in system libraries (Python 3.9+). We seem to have neither."
+        )
 
 ON_LOAD_EXCEPTION_PLUGIN = """
 from airflow.plugins_manager import AirflowPlugin
@@ -283,9 +299,9 @@ class TestPluginsManager:
         mock_entrypoint.load.side_effect = ImportError('my_fake_module not found')
         mock_dist.entry_points = [mock_entrypoint]
 
-        with mock.patch(f'{importlib_metadata}.distributions', return_value=[mock_dist]), caplog.at_level(
-            logging.ERROR, logger='airflow.plugins_manager'
-        ):
+        with mock.patch(
+            f'{importlib_metadata_string}.distributions', return_value=[mock_dist]
+        ), caplog.at_level(logging.ERROR, logger='airflow.plugins_manager'):
             load_entrypoint_plugins()
 
             received_logs = caplog.text
@@ -358,7 +374,7 @@ class TestEntryPointSource:
         mock_dist.version = '1.0.0'
         mock_dist.entry_points = [mock_entrypoint]
 
-        with mock.patch(f'{importlib_metadata}.distributions', return_value=[mock_dist]):
+        with mock.patch(f'{importlib_metadata_string}.distributions', return_value=[mock_dist]):
             plugins_manager.load_entrypoint_plugins()
 
         source = plugins_manager.EntryPointSource(mock_entrypoint, mock_dist)
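
The reworked test setup above accounts for the fact that, when the importlib_metadata backport is installed, it takes precedence over the stdlib importlib.metadata, and that on Python 3.9 only the stdlib module is present (setup.cfg installs the backport only for python_version<"3.9"). A small sketch of that backport-first import pattern, not the test code itself:

    # Prefer the importlib_metadata backport when it is installed; otherwise
    # fall back to the stdlib module (importlib.metadata, Python 3.8+).
    try:
        import importlib_metadata as metadata
    except ImportError:
        import importlib.metadata as metadata

    print(metadata.version("pip"))  # example lookup; assumes pip is installed
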
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 179dbcc..94ff8b7 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -28,6 +28,7 @@ import pandas as pd
 import pytest
 from hmsclient import HMSClient
 
+from airflow import PY39
 from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection
 from airflow.models.dag import DAG
@@ -44,6 +45,12 @@ DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveEnvironment(unittest.TestCase):
     def setUp(self):
         self.next_day = (DEFAULT_DATE + datetime.timedelta(days=1)).isoformat()[:10]
@@ -58,6 +65,12 @@ class TestHiveEnvironment(unittest.TestCase):
             self.hook = HiveMetastoreHook()
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveCliHook(unittest.TestCase):
     @mock.patch('tempfile.tempdir', '/tmp/')
     @mock.patch('tempfile._RandomNameSequence.__next__')
@@ -324,6 +337,12 @@ class TestHiveCliHook(unittest.TestCase):
         assert_equal_ignore_multiple_spaces(self, mock_run_cli.call_args_list[0][0][0], query)
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveMetastoreHook(TestHiveEnvironment):
     VALID_FILTER_MAP = {'key2': 'value2'}
 
@@ -549,6 +568,12 @@ class TestHiveMetastoreHook(TestHiveEnvironment):
         assert metastore_mock.drop_partition(self.table, db=self.database, part_vals=[DEFAULT_DATE_DS]), ret
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveServer2Hook(unittest.TestCase):
     def _upload_dataframe(self):
         df = pd.DataFrame({'a': [1, 2], 'b': [1, 2]})
@@ -797,6 +822,12 @@ class TestHiveServer2Hook(unittest.TestCase):
         assert 'test_dag_run_id' in output
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveCli(unittest.TestCase):
     def setUp(self):
         self.nondefault_schema = "nondefault"
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
index c1fddd2..e85595f 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
@@ -20,6 +20,9 @@ import re
 import unittest
 from unittest.mock import MagicMock, patch
 
+import pytest
+
+from airflow import PY39
 from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
 from airflow.utils import timezone
 from airflow.utils.operator_helpers import context_to_airflow_vars
@@ -29,6 +32,12 @@ from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockMySqlHook
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveToMySqlTransfer(TestHiveEnvironment):
     def setUp(self):
         self.kwargs = dict(
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_samba.py b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
index 26c3329..c2a7cde 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_samba.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
@@ -19,12 +19,21 @@ import os
 import unittest
 from unittest.mock import MagicMock, Mock, PropertyMock, patch
 
+import pytest
+
+from airflow import PY39
 from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
 from airflow.utils.operator_helpers import context_to_airflow_vars
 from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
 from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockSambaHook
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHive2SambaOperator(TestHiveEnvironment):
     def setUp(self):
         self.kwargs = dict(
diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
index 99455da..881ea13 100644
--- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
@@ -21,7 +21,9 @@ import unittest
 from collections import OrderedDict
 from unittest.mock import Mock, PropertyMock, patch
 
-from airflow import PY38
+import pytest
+
+from airflow import PY38, PY39
 
 if PY38:
     MsSqlToHiveTransferOperator: None = None
@@ -34,8 +36,14 @@ except ImportError:
     pymssql = None
 
 
-@unittest.skipIf(PY38, "Mssql package not available when Python >= 3.8.")
-@unittest.skipIf(pymssql is None, 'pymssql package not present')
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
+@pytest.mark.skipif(PY38, reason="Mssql package not available when Python >= 3.8.")
+@pytest.mark.skipif(pymssql is None, reason='pymssql package not present')
 class TestMsSqlToHiveTransfer(unittest.TestCase):
     def setUp(self):
         self.kwargs = dict(sql='sql', hive_table='table', task_id='test_mssql_to_hive', dag=None)
diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
index 1b7e2cd..0bc5996 100644
--- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
@@ -24,6 +24,7 @@ from unittest import mock
 
 import pytest
 
+from airflow import PY39
 from airflow.models.dag import DAG
 from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
@@ -58,6 +59,12 @@ class HiveopTempDir:
         return tail.startswith("airflow_hiveop_")
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 @pytest.mark.backend("mysql")
 class TestTransfer(unittest.TestCase):
     def setUp(self):
diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py
index 19d7d52..cf81a1a 100644
--- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py
+++ b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py
@@ -47,7 +47,7 @@ from elasticsearch.exceptions import NotFoundError
 from .utilities import get_random_id
 
 
-# pylint: disable=redefined-builtin, unused-argument
+# pylint: disable=redefined-builtin,unused-argument
 class FakeElasticsearch(Elasticsearch):
     __documents_dict = None
 
diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py
index c8f3823..c097f7e 100644
--- a/tests/sensors/test_base.py
+++ b/tests/sensors/test_base.py
@@ -15,6 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# pylint: disable=no-member
 
 # pylint: disable=no-member
 
diff --git a/tests/sensors/test_smart_sensor_operator.py b/tests/sensors/test_smart_sensor_operator.py
index c483ed1..27effc1 100644
--- a/tests/sensors/test_smart_sensor_operator.py
+++ b/tests/sensors/test_smart_sensor_operator.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# pylint: disable=no-member
+
 
 # pylint: disable=no-member