Posted to commits@airflow.apache.org by po...@apache.org on 2022/10/24 19:06:06 UTC

[airflow] branch main updated: Move min airflow version to 2.3.0 for all providers (#27196)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 78b8ea2f22 Move min airflow version to 2.3.0 for all providers (#27196)
78b8ea2f22 is described below

commit 78b8ea2f22239db3ef9976301234a66e50b47a94
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Oct 24 21:05:58 2022 +0200

    Move min airflow version to 2.3.0 for all providers (#27196)
    
    As of October 11, our providers are supposed to be compatible with
    Airflow 2.3+, so all code for backwards compatibility with Airflow 2.2
    can now be removed.
---
 .github/workflows/ci.yml                           |  34 ++-
 .pre-commit-config.yaml                            |   7 +-
 README.md                                          |   4 +-
 STATIC_CODE_CHECKS.rst                             |   4 +-
 airflow/operators/email.py                         |   2 +-
 airflow/providers/airbyte/provider.yaml            |   2 +-
 airflow/providers/alibaba/provider.yaml            |   2 +-
 airflow/providers/amazon/aws/links/base_aws.py     |  21 +-
 airflow/providers/amazon/aws/operators/appflow.py  |  12 +-
 .../providers/amazon/aws/operators/redshift_sql.py |   6 +-
 .../amazon/aws/utils/connection_wrapper.py         |  10 +-
 airflow/providers/amazon/provider.yaml             |   2 +-
 airflow/providers/apache/beam/provider.yaml        |   2 +-
 airflow/providers/apache/cassandra/provider.yaml   |   2 +-
 airflow/providers/apache/drill/provider.yaml       |   2 +-
 airflow/providers/apache/druid/provider.yaml       |   2 +-
 airflow/providers/apache/hdfs/provider.yaml        |   2 +-
 airflow/providers/apache/hive/operators/hive.py    |   4 +-
 airflow/providers/apache/hive/provider.yaml        |   2 +-
 .../apache/hive/transfers/hive_to_mysql.py         |   8 +-
 .../apache/hive/transfers/mssql_to_hive.py         |   4 +-
 airflow/providers/apache/kylin/provider.yaml       |   2 +-
 airflow/providers/apache/livy/provider.yaml        |   2 +-
 airflow/providers/apache/pig/provider.yaml         |   2 +-
 airflow/providers/apache/pinot/provider.yaml       |   2 +-
 .../providers/apache/spark/hooks/spark_submit.py   |   4 +-
 airflow/providers/apache/spark/provider.yaml       |   2 +-
 airflow/providers/apache/sqoop/provider.yaml       |   2 +-
 airflow/providers/arangodb/provider.yaml           |   2 +-
 airflow/providers/asana/provider.yaml              |   2 +-
 airflow/providers/atlassian/jira/provider.yaml     |   2 +-
 airflow/providers/celery/provider.yaml             |   2 +-
 airflow/providers/cloudant/provider.yaml           |   2 +-
 .../cncf/kubernetes/operators/kubernetes_pod.py    |   3 +-
 airflow/providers/common/sql/hooks/sql.py          |  24 ---
 airflow/providers/common/sql/operators/sql.py      |  13 +-
 airflow/providers/common/sql/sensors/sql.py        |  13 +-
 .../providers/databricks/hooks/databricks_sql.py   |   9 +-
 .../providers/databricks/operators/databricks.py   |  18 +-
 airflow/providers/databricks/provider.yaml         |   2 +-
 .../providers/databricks/triggers/databricks.py    |  12 +-
 airflow/providers/datadog/provider.yaml            |   2 +-
 airflow/providers/dbt/cloud/operators/dbt.py       |  12 +-
 airflow/providers/dbt/cloud/provider.yaml          |   2 +-
 airflow/providers/dingding/provider.yaml           |   2 +-
 airflow/providers/discord/provider.yaml            |   2 +-
 airflow/providers/elasticsearch/provider.yaml      |   2 +-
 airflow/providers/exasol/provider.yaml             |   2 +-
 airflow/providers/facebook/provider.yaml           |   2 +-
 airflow/providers/github/provider.yaml             |   2 +-
 airflow/providers/google/cloud/links/base.py       |  19 +-
 airflow/providers/google/cloud/links/dataproc.py   |  33 +--
 .../providers/google/cloud/operators/bigquery.py   |  32 +--
 .../google/cloud/operators/dataproc_metastore.py   |  35 +---
 .../google/cloud/triggers/cloud_composer.py        |  12 +-
 airflow/providers/google/leveldb/hooks/leveldb.py  |  16 +-
 airflow/providers/google/provider.yaml             |   2 +-
 airflow/providers/grpc/provider.yaml               |   2 +-
 airflow/providers/hashicorp/provider.yaml          |   2 +-
 airflow/providers/influxdb/provider.yaml           |   2 +-
 airflow/providers/jdbc/provider.yaml               |   2 +-
 airflow/providers/jenkins/provider.yaml            |   2 +-
 .../microsoft/azure/operators/data_factory.py      |  22 +-
 airflow/providers/microsoft/azure/provider.yaml    |   2 +-
 .../providers/microsoft/mssql/operators/mssql.py   |   4 +-
 airflow/providers/microsoft/mssql/provider.yaml    |   2 +-
 airflow/providers/microsoft/psrp/operators/psrp.py |   7 +-
 airflow/providers/microsoft/winrm/provider.yaml    |   2 +-
 airflow/providers/mongo/provider.yaml              |   2 +-
 airflow/providers/mysql/operators/mysql.py         |   4 +-
 airflow/providers/mysql/provider.yaml              |   2 +-
 .../providers/mysql/transfers/presto_to_mysql.py   |   4 +-
 .../providers/mysql/transfers/trino_to_mysql.py    |   4 +-
 .../providers/mysql/transfers/vertica_to_mysql.py  |   8 +-
 airflow/providers/neo4j/provider.yaml              |   2 +-
 airflow/providers/odbc/provider.yaml               |   2 +-
 airflow/providers/openfaas/provider.yaml           |   2 +-
 airflow/providers/opsgenie/provider.yaml           |   2 +-
 airflow/providers/oracle/hooks/oracle.py           |  16 +-
 airflow/providers/oracle/provider.yaml             |   2 +-
 airflow/providers/pagerduty/provider.yaml          |   2 +-
 airflow/providers/papermill/provider.yaml          |   2 +-
 airflow/providers/plexus/provider.yaml             |   2 +-
 airflow/providers/postgres/operators/postgres.py   |   6 +-
 airflow/providers/postgres/provider.yaml           |   2 +-
 airflow/providers/presto/hooks/presto.py           |   5 +-
 airflow/providers/presto/provider.yaml             |   2 +-
 airflow/providers/qubole/hooks/qubole.py           |   4 +-
 airflow/providers/qubole/operators/qubole.py       |  14 +-
 airflow/providers/qubole/provider.yaml             |   2 +-
 airflow/providers/redis/provider.yaml              |   2 +-
 airflow/providers/salesforce/provider.yaml         |   2 +-
 airflow/providers/samba/provider.yaml              |   2 +-
 airflow/providers/segment/provider.yaml            |   2 +-
 airflow/providers/sendgrid/provider.yaml           |   2 +-
 airflow/providers/sftp/provider.yaml               |   2 +-
 airflow/providers/singularity/provider.yaml        |   2 +-
 airflow/providers/slack/provider.yaml              |   2 +-
 airflow/providers/slack/transfers/sql_to_slack.py  |  12 +-
 airflow/providers/slack/utils/__init__.py          |   9 +-
 airflow/providers/snowflake/hooks/snowflake.py     |   9 +-
 airflow/providers/snowflake/provider.yaml          |   2 +-
 airflow/providers/ssh/provider.yaml                |   2 +-
 airflow/providers/tableau/provider.yaml            |   2 +-
 airflow/providers/tabular/provider.yaml            |   2 +-
 airflow/providers/telegram/provider.yaml           |   2 +-
 airflow/providers/trino/hooks/trino.py             |  23 +-
 airflow/providers/trino/provider.yaml              |   2 +-
 airflow/providers/vertica/provider.yaml            |   2 +-
 airflow/providers/yandex/provider.yaml             |   2 +-
 airflow/providers/zendesk/provider.yaml            |   2 +-
 dev/breeze/src/airflow_breeze/pre_commit_ids.py    |   2 +-
 .../howto/create-update-providers.rst              |   2 +-
 docs/apache-airflow/howto/define_extra_link.rst    |   6 +-
 generated/provider_dependencies.json               | 136 ++++++------
 images/breeze/output-commands-hash.txt             |   2 +-
 images/breeze/output_static-checks.svg             | 232 ++++++++++-----------
 .../pre_commit_check_2_2_compatibility.py          | 115 ----------
 ..._commit_check_provider_airflow_compatibility.py |  64 ++++++
 scripts/in_container/verify_providers.py           |   5 +
 tests/providers/snowflake/hooks/test_snowflake.py  |  14 +-
 121 files changed, 442 insertions(+), 794 deletions(-)
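Most of the removed code in the diff below follows a handful of repeated compatibility patterns; the most common is a guarded import that falls back to a hand-rolled 2.2-era substitute. A minimal sketch of that pattern and its 2.3+ replacement, drawn from the connection_wrapper.py hunk further down and shown standalone here for clarity:

    # Before: guarded import kept only for Airflow 2.2 compatibility
    try:
        from airflow.utils.types import NOTSET, ArgNotSet
    except ImportError:  # fallback for Airflow versions that lack these symbols

        class ArgNotSet:  # type: ignore[no-redef]
            """Sentinel type for annotations, useful when None is not viable."""

        NOTSET = ArgNotSet()

    # After: with apache-airflow>=2.3.0 as the floor, the direct import is always safe
    from airflow.utils.types import NOTSET, ArgNotSet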

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1716be16b1..0f7bb2248f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -884,32 +884,22 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
         run: |
           pipx install twine
           twine check dist/*.whl
-      - name: "Remove airflow package and replace providers with 2.2-compliant versions"
+      - name: "Remove airflow package and replace providers with 2.3-compliant versions"
         run: |
           rm -vf dist/apache_airflow-*.whl \
-             dist/apache_airflow_providers_cncf_kubernetes*.whl \
-             dist/apache_airflow_providers_celery*.whl
+             dist/apache_airflow_providers_docker*.whl
           pip download --no-deps --dest dist \
-              apache-airflow-providers-cncf-kubernetes==3.0.0 \
-              apache-airflow-providers-celery==2.1.3
-      - name: "Install and test provider packages and airflow on Airflow 2.2 files"
+              apache-airflow-providers-docker==3.1.0
+      - name: "Get all provider extras as AIRFLOW_EXTRAS evn variable"
         run: >
-          breeze release-management verify-provider-packages --use-airflow-version 2.2.0
-          --use-packages-from-dist --package-format wheel --airflow-constraints-reference constraints-2.2.0
-        env:
-          # The extras below are all extras that should be installed with Airflow 2.2.0
-          AIRFLOW_EXTRAS: "airbyte,alibaba,amazon,apache.atlas,apache.beam,apache.cassandra,apache.drill,\
-            apache.druid,apache.hdfs,apache.hive,apache.kylin,apache.livy,apache.pig,apache.pinot,\
-            apache.spark,apache.sqoop,apache.webhdfs,asana,async,\
-            celery,cgroups,cloudant,cncf.kubernetes,dask,databricks,datadog,\
-            deprecated_api,dingding,discord,docker,\
-            elasticsearch,exasol,facebook,ftp,github_enterprise,google,google_auth,\
-            grpc,hashicorp,http,imap,influxdb,jdbc,jenkins,jira,kerberos,ldap,\
-            leveldb,microsoft.azure,microsoft.mssql,microsoft.psrp,microsoft.winrm,mongo,mysql,\
-            neo4j,odbc,openfaas,opsgenie,oracle,pagerduty,pandas,papermill,password,plexus,\
-            postgres,presto,qubole,rabbitmq,redis,salesforce,samba,segment,sendgrid,sentry,\
-            sftp,singularity,slack,snowflake,sqlite,ssh,statsd,tableau,telegram,trino,vertica,\
-            virtualenv,yandex,zendesk"
+          python -c 'from pathlib import Path; import json;
+          providers = json.loads(Path("generated/provider_dependencies.json").read_text());
+          provider_keys = ",".join(providers.keys());
+          print("AIRFLOW_EXTRAS={}".format(provider_keys))' >> $GITHUB_ENV
+      - name: "Install and test provider packages and airflow on Airflow 2.3 files"
+        run: >
+          breeze release-management verify-provider-packages --use-airflow-version 2.3.0
+          --use-packages-from-dist --package-format wheel --airflow-constraints-reference constraints-2.3.0
       - name: "Fix ownership"
         run: breeze ci fix-ownership
         if: always()
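
The replaced step no longer hard-codes the extras list; it derives AIRFLOW_EXTRAS from generated/provider_dependencies.json. Roughly equivalent standalone Python (assuming it is run from the repository root, where that file lives):

    from pathlib import Path
    import json

    providers = json.loads(Path("generated/provider_dependencies.json").read_text())
    provider_keys = ",".join(providers.keys())
    # In the workflow, the resulting line is appended to $GITHUB_ENV so later steps see it.
    print(f"AIRFLOW_EXTRAS={provider_keys}")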
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 138c8110de..2bc2017e22 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -360,10 +360,9 @@ repos:
         pass_filenames: false
         entry: ./scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py
         additional_dependencies: ['rich>=12.4.4']
-        # This check might be removed when min-airflow-version in providers is 2.2
-      - id: check-airflow-2-2-compatibility
-        name: Check that providers are 2.2 compatible.
-        entry: ./scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py
+      - id: check-airflow-provider-compatibility
+        name: Check compatibility of Providers with Airflow
+        entry: ./scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py
         language: python
         pass_filenames: true
         files: ^airflow/providers/.*\.py$
diff --git a/README.md b/README.md
index 895f17d7f7..a9e697fca2 100644
--- a/README.md
+++ b/README.md
@@ -407,8 +407,8 @@ that we increase the minimum Airflow version, when 12 months passed since the
 first release for the MINOR version of Airflow.
 
 For example this means that by default we upgrade the minimum version of Airflow supported by providers
-to 2.3.0 in the first Provider's release after 11th of October 2022 (11th of October 2021 is the date when the
-first `PATCHLEVEL` of 2.2 (2.2.0) has been released.
+to 2.4.0 in the first Provider's release after 30th of April 2023. The 30th of April 2022 is the date when the
+first `PATCHLEVEL` of 2.3 (2.3.0) was released.
 
 Providers are often connected with some stakeholders that are vitally interested in maintaining backwards
 compatibilities in their integrations (for example cloud providers, or specific service providers). But,
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 7fd5c6a919..747abb58e3 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -138,10 +138,10 @@ require Breeze Docker image to be build locally.
 +--------------------------------------------------------+------------------------------------------------------------------+---------+
 | blacken-docs                                           | Run black on python code blocks in documentation files           |         |
 +--------------------------------------------------------+------------------------------------------------------------------+---------+
-| check-airflow-2-2-compatibility                        | Check that providers are 2.2 compatible.                         |         |
-+--------------------------------------------------------+------------------------------------------------------------------+---------+
 | check-airflow-config-yaml-consistent                   | Checks for consistency between config.yml and default_config.cfg |         |
 +--------------------------------------------------------+------------------------------------------------------------------+---------+
+| check-airflow-provider-compatibility                   | Check compatibility of Providers with Airflow                    |         |
++--------------------------------------------------------+------------------------------------------------------------------+---------+
 | check-apache-license-rat                               | Check if licenses are OK for Apache                              |         |
 +--------------------------------------------------------+------------------------------------------------------------------+---------+
 | check-base-operator-partial-arguments                  | Check BaseOperator and partial() arguments                       |         |
diff --git a/airflow/operators/email.py b/airflow/operators/email.py
index d0016f7f49..1d310f63fe 100644
--- a/airflow/operators/email.py
+++ b/airflow/operators/email.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 from typing import Any, Sequence
 
-from airflow.models import BaseOperator
+from airflow.models.baseoperator import BaseOperator
 from airflow.utils.context import Context
 from airflow.utils.email import send_email
 
diff --git a/airflow/providers/airbyte/provider.yaml b/airflow/providers/airbyte/provider.yaml
index 94f6a2ccac..05e04b8983 100644
--- a/airflow/providers/airbyte/provider.yaml
+++ b/airflow/providers/airbyte/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-http
 
 integrations:
diff --git a/airflow/providers/alibaba/provider.yaml b/airflow/providers/alibaba/provider.yaml
index b52d598f4c..f89d4eaa20 100644
--- a/airflow/providers/alibaba/provider.yaml
+++ b/airflow/providers/alibaba/provider.yaml
@@ -31,7 +31,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - oss2>=2.14.0
 
 integrations:
diff --git a/airflow/providers/amazon/aws/links/base_aws.py b/airflow/providers/amazon/aws/links/base_aws.py
index 973b746eeb..fba2f17e96 100644
--- a/airflow/providers/amazon/aws/links/base_aws.py
+++ b/airflow/providers/amazon/aws/links/base_aws.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import datetime
 from typing import TYPE_CHECKING, ClassVar
 
 from airflow.models import BaseOperatorLink, XCom
@@ -63,30 +62,18 @@ class BaseAwsLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ) -> str:
         """
         Link to Amazon Web Services Console.
 
         :param operator: airflow operator
         :param ti_key: TaskInstance ID to return link for
-        :param dttm: execution date. Uses for compatibility with Airflow 2.2
         :return: link to external system
         """
-        if ti_key is not None:
-            conf = XCom.get_value(key=self.key, ti_key=ti_key)
-        elif not dttm:
-            conf = {}
-        else:
-            conf = XCom.get_one(
-                key=self.key,
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-            )
-
+        conf = XCom.get_value(key=self.key, ti_key=ti_key)
         return self.format_link(**conf) if conf else ""
 
     @classmethod
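
Several providers carry the same change to operator extra links: the old get_link(operator, dttm, ti_key) compatibility signature becomes the Airflow 2.3+ form with a keyword-only ti_key and a single XCom.get_value() lookup. A minimal sketch of a link class using that signature (the class name and the "my_service_url" XCom key are illustrative, not taken from this commit):

    from __future__ import annotations

    from airflow.models import BaseOperator, BaseOperatorLink, XCom
    from airflow.models.taskinstance import TaskInstanceKey


    class MyServiceLink(BaseOperatorLink):
        name = "My Service"

        def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
            # On Airflow 2.3+ ti_key is always supplied, so no execution_date fallback is needed.
            url = XCom.get_value(key="my_service_url", ti_key=ti_key)
            return url or ""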
diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/airflow/providers/amazon/aws/operators/appflow.py
index c7e16387ed..b077b04bb5 100644
--- a/airflow/providers/amazon/aws/operators/appflow.py
+++ b/airflow/providers/amazon/aws/operators/appflow.py
@@ -24,7 +24,7 @@ from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.operators.python import ShortCircuitOperator
 from airflow.providers.amazon.aws.hooks.appflow import AppflowHook
-from airflow.providers.amazon.aws.utils import datetime_to_epoch_ms, get_airflow_version
+from airflow.providers.amazon.aws.utils import datetime_to_epoch_ms
 
 if TYPE_CHECKING:
     from mypy_boto3_appflow.type_defs import (
@@ -400,7 +400,7 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator):
 
     :param flow_name: The flow name
     :param appflow_run_task_id: Run task ID from where this operator should extract the execution ID
-    :param ignore_downstream_trigger_rules: Ignore downstream trigger rules (Ignored for Airflow < 2.3)
+    :param ignore_downstream_trigger_rules: Ignore downstream trigger rules
     :param aws_conn_id: aws connection to use
     :param region: aws region to use
     """
@@ -417,19 +417,13 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator):
         region: str | None = None,
         **kwargs,
     ) -> None:
-        if get_airflow_version() >= (2, 3):
-            kwargs["ignore_downstream_trigger_rules"] = ignore_downstream_trigger_rules
-        else:
-            self.log.warning(
-                "Ignoring argument ignore_downstream_trigger_rules (%s) - Only supported for Airflow >= 2.3",
-                ignore_downstream_trigger_rules,
-            )
         super().__init__(
             python_callable=self._has_new_records_func,
             op_kwargs={
                 "flow_name": flow_name,
                 "appflow_run_task_id": appflow_run_task_id,
             },
+            ignore_downstream_trigger_rules=ignore_downstream_trigger_rules,
             **kwargs,
         )
         self.aws_conn_id = aws_conn_id
diff --git a/airflow/providers/amazon/aws/operators/redshift_sql.py b/airflow/providers/amazon/aws/operators/redshift_sql.py
index 93534719af..af93c5f6cf 100644
--- a/airflow/providers/amazon/aws/operators/redshift_sql.py
+++ b/airflow/providers/amazon/aws/operators/redshift_sql.py
@@ -20,7 +20,6 @@ import warnings
 from typing import Sequence
 
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
-from airflow.www import utils as wwwutils
 
 
 class RedshiftSQLOperator(SQLExecuteQueryOperator):
@@ -46,10 +45,7 @@ class RedshiftSQLOperator(SQLExecuteQueryOperator):
         "redshift_conn_id",
     )
     template_ext: Sequence[str] = (".sql",)
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
-    template_fields_renderers = {
-        "sql": "postgresql" if "postgresql" in wwwutils.get_attr_renderer() else "sql"
-    }
+    template_fields_renderers = {"sql": "postgresql"}
 
     def __init__(self, *, redshift_conn_id: str = "redshift_default", **kwargs) -> None:
         super().__init__(conn_id=redshift_conn_id, **kwargs)
diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/airflow/providers/amazon/aws/utils/connection_wrapper.py
index 5892bf1176..f6ae3fdf01 100644
--- a/airflow/providers/amazon/aws/utils/connection_wrapper.py
+++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py
@@ -29,15 +29,7 @@ from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.utils import trim_none_values
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.log.secrets_masker import mask_secret
-
-try:
-    from airflow.utils.types import NOTSET, ArgNotSet
-except ImportError:  # TODO: Remove when the provider has an Airflow 2.3+ requirement.
-
-    class ArgNotSet:  # type: ignore[no-redef]
-        """Sentinel type for annotations, useful when None is not viable."""
-
-    NOTSET = ArgNotSet()
+from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
     from airflow.models.connection import Connection  # Avoid circular imports.
diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml
index ad645239a3..aaa65a2718 100644
--- a/airflow/providers/amazon/provider.yaml
+++ b/airflow/providers/amazon/provider.yaml
@@ -46,7 +46,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - boto3>=1.15.0
   # watchtower 3 has been released end Jan and introduced breaking change across the board that might
diff --git a/airflow/providers/apache/beam/provider.yaml b/airflow/providers/apache/beam/provider.yaml
index f106e53476..c7655bafb8 100644
--- a/airflow/providers/apache/beam/provider.yaml
+++ b/airflow/providers/apache/beam/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-beam>=2.39.0
 
 integrations:
diff --git a/airflow/providers/apache/cassandra/provider.yaml b/airflow/providers/apache/cassandra/provider.yaml
index c376066d47..4891abeaf5 100644
--- a/airflow/providers/apache/cassandra/provider.yaml
+++ b/airflow/providers/apache/cassandra/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - cassandra-driver>=3.13.0
 
 integrations:
diff --git a/airflow/providers/apache/drill/provider.yaml b/airflow/providers/apache/drill/provider.yaml
index c94df8458c..bc09b07152 100644
--- a/airflow/providers/apache/drill/provider.yaml
+++ b/airflow/providers/apache/drill/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - sqlalchemy-drill>=1.1.0
 
diff --git a/airflow/providers/apache/druid/provider.yaml b/airflow/providers/apache/druid/provider.yaml
index d932b2c441..8e7367a1e8 100644
--- a/airflow/providers/apache/druid/provider.yaml
+++ b/airflow/providers/apache/druid/provider.yaml
@@ -40,7 +40,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - pydruid>=0.4.1
 
diff --git a/airflow/providers/apache/hdfs/provider.yaml b/airflow/providers/apache/hdfs/provider.yaml
index f4632614ef..fccf2475b2 100644
--- a/airflow/providers/apache/hdfs/provider.yaml
+++ b/airflow/providers/apache/hdfs/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - snakebite-py3
   - hdfs[avro,dataframe,kerberos]>=2.0.4
 
diff --git a/airflow/providers/apache/hive/operators/hive.py b/airflow/providers/apache/hive/operators/hive.py
index 30627b22ca..23f6c32edd 100644
--- a/airflow/providers/apache/hive/operators/hive.py
+++ b/airflow/providers/apache/hive/operators/hive.py
@@ -103,13 +103,11 @@ class HiveOperator(BaseOperator):
         self.mapred_queue_priority = mapred_queue_priority
         self.mapred_job_name = mapred_job_name
 
-        job_name_template = conf.get(
+        job_name_template = conf.get_mandatory_value(
             "hive",
             "mapred_job_name_template",
             fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}",
         )
-        if job_name_template is None:
-            raise ValueError("Job name template should be set !")
         self.mapred_job_name_template: str = job_name_template
 
         # assigned lazily - just for consistency we can create the attribute with a
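
The hunk above relies on conf.get_mandatory_value(), available on Airflow 2.3+, which returns a non-None string (raising if the option cannot be resolved), so the explicit None check goes away. A small standalone sketch of the same call (section, key, and fallback taken from the hunk):

    from airflow.configuration import conf

    job_name_template = conf.get_mandatory_value(
        "hive",
        "mapred_job_name_template",
        fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}",
    )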
diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml
index 9c32c78e18..5c359460e0 100644
--- a/airflow/providers/apache/hive/provider.yaml
+++ b/airflow/providers/apache/hive/provider.yaml
@@ -42,7 +42,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - hmsclient>=0.1.0
   - pandas>=0.17.1
diff --git a/airflow/providers/apache/hive/transfers/hive_to_mysql.py b/airflow/providers/apache/hive/transfers/hive_to_mysql.py
index c8eacd5111..b1a3669d71 100644
--- a/airflow/providers/apache/hive/transfers/hive_to_mysql.py
+++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py
@@ -25,14 +25,10 @@ from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.utils.operator_helpers import context_to_airflow_vars
-from airflow.www import utils as wwwutils
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-# TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
-MYSQL_RENDERER = "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql"
-
 
 class HiveToMySqlOperator(BaseOperator):
     """
@@ -64,8 +60,8 @@ class HiveToMySqlOperator(BaseOperator):
     template_ext: Sequence[str] = (".sql",)
     template_fields_renderers = {
         "sql": "hql",
-        "mysql_preoperator": MYSQL_RENDERER,
-        "mysql_postoperator": MYSQL_RENDERER,
+        "mysql_preoperator": "mysql",
+        "mysql_postoperator": "mysql",
     }
     ui_color = "#a0e08c"
 
diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
index 1c2c437d18..9cdd581911 100644
--- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -28,7 +28,6 @@ import unicodecsv as csv
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
 from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
-from airflow.www import utils as wwwutils
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -66,8 +65,7 @@ class MsSqlToHiveOperator(BaseOperator):
 
     template_fields: Sequence[str] = ("sql", "partition", "hive_table")
     template_ext: Sequence[str] = (".sql",)
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
-    template_fields_renderers = {"sql": "tsql" if "tsql" in wwwutils.get_attr_renderer() else "sql"}
+    template_fields_renderers = {"sql": "tsql"}
     ui_color = "#a0e08c"
 
     def __init__(
diff --git a/airflow/providers/apache/kylin/provider.yaml b/airflow/providers/apache/kylin/provider.yaml
index fbdbf9ef00..62a64074fc 100644
--- a/airflow/providers/apache/kylin/provider.yaml
+++ b/airflow/providers/apache/kylin/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - kylinpy>=2.6
 
 integrations:
diff --git a/airflow/providers/apache/livy/provider.yaml b/airflow/providers/apache/livy/provider.yaml
index 4c4eb3c178..0290564dfb 100644
--- a/airflow/providers/apache/livy/provider.yaml
+++ b/airflow/providers/apache/livy/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-http
 
 integrations:
diff --git a/airflow/providers/apache/pig/provider.yaml b/airflow/providers/apache/pig/provider.yaml
index 8b1ad67daa..998029c78b 100644
--- a/airflow/providers/apache/pig/provider.yaml
+++ b/airflow/providers/apache/pig/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
 
 integrations:
   - integration-name: Apache Pig
diff --git a/airflow/providers/apache/pinot/provider.yaml b/airflow/providers/apache/pinot/provider.yaml
index 6493795635..d4f0c266b5 100644
--- a/airflow/providers/apache/pinot/provider.yaml
+++ b/airflow/providers/apache/pinot/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - pinotdb>0.4.7
 
diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py
index 114394dc57..472f1f8cda 100644
--- a/airflow/providers/apache/spark/hooks/spark_submit.py
+++ b/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -627,9 +627,7 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
                     # we still attempt to kill the yarn application
                     renew_from_kt(self._principal, self._keytab, exit_on_fail=False)
                     env = os.environ.copy()
-                    ccacche = airflow_conf.get("kerberos", "ccache")
-                    if ccacche is None:
-                        raise ValueError("The kerberos/ccache config should be set here!")
+                    ccacche = airflow_conf.get_mandatory_value("kerberos", "ccache")
                     env["KRB5CCNAME"] = ccacche
 
                 with subprocess.Popen(
diff --git a/airflow/providers/apache/spark/provider.yaml b/airflow/providers/apache/spark/provider.yaml
index 629ed74d39..e827ad9d5c 100644
--- a/airflow/providers/apache/spark/provider.yaml
+++ b/airflow/providers/apache/spark/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - pyspark
 
 integrations:
diff --git a/airflow/providers/apache/sqoop/provider.yaml b/airflow/providers/apache/sqoop/provider.yaml
index db7e8e7e9a..ada15c3cd2 100644
--- a/airflow/providers/apache/sqoop/provider.yaml
+++ b/airflow/providers/apache/sqoop/provider.yaml
@@ -34,7 +34,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
 
 integrations:
   - integration-name: Apache Sqoop
diff --git a/airflow/providers/arangodb/provider.yaml b/airflow/providers/arangodb/provider.yaml
index 09cc3f27f5..0932447bfe 100644
--- a/airflow/providers/arangodb/provider.yaml
+++ b/airflow/providers/arangodb/provider.yaml
@@ -22,7 +22,7 @@ description: |
     `ArangoDB <https://www.arangodb.com/>`__
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - python-arango>=7.3.2
 
 versions:
diff --git a/airflow/providers/asana/provider.yaml b/airflow/providers/asana/provider.yaml
index 96f94715a6..a08572e978 100644
--- a/airflow/providers/asana/provider.yaml
+++ b/airflow/providers/asana/provider.yaml
@@ -31,7 +31,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - asana>=0.10
 
 integrations:
diff --git a/airflow/providers/atlassian/jira/provider.yaml b/airflow/providers/atlassian/jira/provider.yaml
index 92285d56c5..395bada494 100644
--- a/airflow/providers/atlassian/jira/provider.yaml
+++ b/airflow/providers/atlassian/jira/provider.yaml
@@ -25,7 +25,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - JIRA>1.0.7
 
 integrations:
diff --git a/airflow/providers/celery/provider.yaml b/airflow/providers/celery/provider.yaml
index 53fe5a96a9..18859d7008 100644
--- a/airflow/providers/celery/provider.yaml
+++ b/airflow/providers/celery/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   # The Celery is known to introduce problems when upgraded to a MAJOR version. Airflow Core
   # Uses Celery for CeleryExecutor, and we also know that Kubernetes Python client follows SemVer
   # (https://docs.celeryq.dev/en/stable/contributing.html?highlight=semver#versions).
diff --git a/airflow/providers/cloudant/provider.yaml b/airflow/providers/cloudant/provider.yaml
index 91225e4de6..f0630d28fb 100644
--- a/airflow/providers/cloudant/provider.yaml
+++ b/airflow/providers/cloudant/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - cloudant>=2.0
 
 integrations:
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
index 5ae71af643..447d185ddb 100644
--- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -329,8 +329,7 @@ class KubernetesPodOperator(BaseOperator):
             "kubernetes_pod_operator": "True",
         }
 
-        # If running on Airflow 2.3+:
-        map_index = getattr(ti, "map_index", -1)
+        map_index = ti.map_index
         if map_index >= 0:
             labels["map_index"] = map_index
 
diff --git a/airflow/providers/common/sql/hooks/sql.py b/airflow/providers/common/sql/hooks/sql.py
index 6a42ca11cc..25d0ecc533 100644
--- a/airflow/providers/common/sql/hooks/sql.py
+++ b/airflow/providers/common/sql/hooks/sql.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import warnings
 from contextlib import closing
 from datetime import datetime
 from typing import Any, Callable, Iterable, Mapping, Optional
@@ -28,8 +27,6 @@ from typing_extensions import Protocol
 
 from airflow import AirflowException
 from airflow.hooks.base import BaseHook
-from airflow.providers_manager import ProvidersManager
-from airflow.utils.module_loading import import_string
 from airflow.version import version
 
 
@@ -41,27 +38,6 @@ def fetch_all_handler(cursor) -> list[tuple] | None:
         return None
 
 
-def _backported_get_hook(connection, *, hook_params=None):
-    """Return hook based on conn_type
-    For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed
-    when "apache-airflow-providers-slack" will depend on Airflow >= 2.3.
-    """
-    hook = ProvidersManager().hooks.get(connection.conn_type, None)
-
-    if hook is None:
-        raise AirflowException(f'Unknown hook type "{connection.conn_type}"')
-    try:
-        hook_class = import_string(hook.hook_class_name)
-    except ImportError:
-        warnings.warn(
-            f"Could not import {hook.hook_class_name} when discovering {hook.hook_name} {hook.package_name}",
-        )
-        raise
-    if hook_params is None:
-        hook_params = {}
-    return hook_class(**{hook.connection_id_attribute_name: connection.conn_id}, **hook_params)
-
-
 class ConnectorProtocol(Protocol):
     """A protocol where you can connect to a database."""
 
diff --git a/airflow/providers/common/sql/operators/sql.py b/airflow/providers/common/sql/operators/sql.py
index 6330509a63..66984a802f 100644
--- a/airflow/providers/common/sql/operators/sql.py
+++ b/airflow/providers/common/sql/operators/sql.py
@@ -21,14 +21,11 @@ import ast
 import re
 from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Sequence, SupportsAbs
 
-from packaging.version import Version
-
 from airflow.compat.functools import cached_property
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import BaseOperator, SkipMixin
-from airflow.providers.common.sql.hooks.sql import DbApiHook, _backported_get_hook, fetch_all_handler
-from airflow.version import version
+from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -118,13 +115,7 @@ class BaseSQLOperator(BaseOperator):
         """Get DB Hook based on connection type"""
         self.log.debug("Get connection for %s", self.conn_id)
         conn = BaseHook.get_connection(self.conn_id)
-        if Version(version) >= Version("2.3"):
-            # "hook_params" were introduced to into "get_hook()" only in Airflow 2.3.
-            hook = conn.get_hook(hook_params=self.hook_params)  # ignore airflow compat check
-        else:
-            # For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed
-            # when "apache-airflow-providers-common-sql" will depend on Airflow >= 2.3.
-            hook = _backported_get_hook(conn, hook_params=self.hook_params)
+        hook = conn.get_hook(hook_params=self.hook_params)
         if not isinstance(hook, DbApiHook):
             from airflow.hooks.dbapi_hook import DbApiHook as _DbApiHook
 
diff --git a/airflow/providers/common/sql/sensors/sql.py b/airflow/providers/common/sql/sensors/sql.py
index 7f90b5b006..d58802dc98 100644
--- a/airflow/providers/common/sql/sensors/sql.py
+++ b/airflow/providers/common/sql/sensors/sql.py
@@ -18,13 +18,10 @@ from __future__ import annotations
 
 from typing import Any, Sequence
 
-from packaging.version import Version
-
 from airflow import AirflowException
 from airflow.hooks.base import BaseHook
-from airflow.providers.common.sql.hooks.sql import DbApiHook, _backported_get_hook
+from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.sensors.base import BaseSensorOperator
-from airflow.version import version
 
 
 class SqlSensor(BaseSensorOperator):
@@ -80,13 +77,7 @@ class SqlSensor(BaseSensorOperator):
 
     def _get_hook(self):
         conn = BaseHook.get_connection(self.conn_id)
-        if Version(version) >= Version("2.3"):
-            # "hook_params" were introduced to into "get_hook()" only in Airflow 2.3.
-            hook = conn.get_hook(hook_params=self.hook_params)  # ignore airflow compat check
-        else:
-            # For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed
-            # when "apache-airflow-providers-common-sql" will depend on Airflow >= 2.3.
-            hook = _backported_get_hook(conn, hook_params=self.hook_params)
+        hook = conn.get_hook(hook_params=self.hook_params)
         if not isinstance(hook, DbApiHook):
             raise AirflowException(
                 f"The connection type is not supported by {self.__class__.__name__}. "
diff --git a/airflow/providers/databricks/hooks/databricks_sql.py b/airflow/providers/databricks/hooks/databricks_sql.py
index c8d5ccdb20..5e456a9ca5 100644
--- a/airflow/providers/databricks/hooks/databricks_sql.py
+++ b/airflow/providers/databricks/hooks/databricks_sql.py
@@ -51,6 +51,7 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
     """
 
     hook_name = "Databricks SQL"
+    _test_connection_sql = "select 42"
 
     def __init__(
         self,
@@ -197,14 +198,6 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
         else:
             return results
 
-    def test_connection(self):
-        """Test the Databricks SQL connection by running a simple query."""
-        try:
-            self.run(sql="select 42")
-        except Exception as e:
-            return False, str(e)
-        return True, "Connection successfully checked"
-
     def bulk_dump(self, table, tmp_file):
         raise NotImplementedError()
 
diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py
index 7a2c6b6039..dbb44c7082 100644
--- a/airflow/providers/databricks/operators/databricks.py
+++ b/airflow/providers/databricks/operators/databricks.py
@@ -144,23 +144,11 @@ class DatabricksJobRunLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm=None,
+        operator: BaseOperator,
         *,
-        ti_key: TaskInstanceKey | None = None,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            run_page_url = XCom.get_value(key=XCOM_RUN_PAGE_URL_KEY, ti_key=ti_key)
-        else:
-            assert dttm
-            run_page_url = XCom.get_one(
-                key=XCOM_RUN_PAGE_URL_KEY,
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-            )
-
-        return run_page_url
+        return XCom.get_value(key=XCOM_RUN_PAGE_URL_KEY, ti_key=ti_key)
 
 
 class DatabricksSubmitRunOperator(BaseOperator):
diff --git a/airflow/providers/databricks/provider.yaml b/airflow/providers/databricks/provider.yaml
index ad25de857e..429bfc33ee 100644
--- a/airflow/providers/databricks/provider.yaml
+++ b/airflow/providers/databricks/provider.yaml
@@ -40,7 +40,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - requests>=2.27,<3
   - databricks-sql-connector>=2.0.0, <3.0.0
diff --git a/airflow/providers/databricks/triggers/databricks.py b/airflow/providers/databricks/triggers/databricks.py
index 6f67f0bc6e..cd2421c376 100644
--- a/airflow/providers/databricks/triggers/databricks.py
+++ b/airflow/providers/databricks/triggers/databricks.py
@@ -18,20 +18,10 @@
 from __future__ import annotations
 
 import asyncio
-import logging
 from typing import Any
 
 from airflow.providers.databricks.hooks.databricks import DatabricksHook
-
-try:
-    from airflow.triggers.base import BaseTrigger, TriggerEvent
-except ImportError:
-    logging.getLogger(__name__).warning(
-        "Deferrable Operators only work starting Airflow 2.2",
-        exc_info=True,
-    )
-    BaseTrigger = object  # type: ignore
-    TriggerEvent = None  # type: ignore
+from airflow.triggers.base import BaseTrigger, TriggerEvent
 
 
 class DatabricksExecutionTrigger(BaseTrigger):
diff --git a/airflow/providers/datadog/provider.yaml b/airflow/providers/datadog/provider.yaml
index 5daf04bd69..2a82a08456 100644
--- a/airflow/providers/datadog/provider.yaml
+++ b/airflow/providers/datadog/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - datadog>=0.14.0
 
 integrations:
diff --git a/airflow/providers/dbt/cloud/operators/dbt.py b/airflow/providers/dbt/cloud/operators/dbt.py
index 37a723421c..6f8080097b 100644
--- a/airflow/providers/dbt/cloud/operators/dbt.py
+++ b/airflow/providers/dbt/cloud/operators/dbt.py
@@ -34,16 +34,8 @@ class DbtCloudRunJobOperatorLink(BaseOperatorLink):
 
     name = "Monitor Job Run"
 
-    def get_link(self, operator, dttm=None, *, ti_key=None):
-        if ti_key is not None:
-            job_run_url = XCom.get_value(key="job_run_url", ti_key=ti_key)
-        else:
-            assert dttm
-            job_run_url = XCom.get_one(
-                dag_id=operator.dag.dag_id, task_id=operator.task_id, execution_date=dttm, key="job_run_url"
-            )
-
-        return job_run_url
+    def get_link(self, operator: BaseOperator, *, ti_key=None):
+        return XCom.get_value(key="job_run_url", ti_key=ti_key)
 
 
 class DbtCloudRunJobOperator(BaseOperator):
diff --git a/airflow/providers/dbt/cloud/provider.yaml b/airflow/providers/dbt/cloud/provider.yaml
index 6fd5ce57fb..2dc162a461 100644
--- a/airflow/providers/dbt/cloud/provider.yaml
+++ b/airflow/providers/dbt/cloud/provider.yaml
@@ -30,7 +30,7 @@ versions:
   - 1.0.1
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-http
 
 integrations:
diff --git a/airflow/providers/dingding/provider.yaml b/airflow/providers/dingding/provider.yaml
index 8032435b36..5a230fa33a 100644
--- a/airflow/providers/dingding/provider.yaml
+++ b/airflow/providers/dingding/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-http
 
 integrations:
diff --git a/airflow/providers/discord/provider.yaml b/airflow/providers/discord/provider.yaml
index f5f0d650ff..0941025114 100644
--- a/airflow/providers/discord/provider.yaml
+++ b/airflow/providers/discord/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-http
 
 integrations:
diff --git a/airflow/providers/elasticsearch/provider.yaml b/airflow/providers/elasticsearch/provider.yaml
index d4fbb67f88..80260766f9 100644
--- a/airflow/providers/elasticsearch/provider.yaml
+++ b/airflow/providers/elasticsearch/provider.yaml
@@ -42,7 +42,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - elasticsearch>7
   - elasticsearch-dbapi
diff --git a/airflow/providers/exasol/provider.yaml b/airflow/providers/exasol/provider.yaml
index 02c37aa3b8..2b0076309a 100644
--- a/airflow/providers/exasol/provider.yaml
+++ b/airflow/providers/exasol/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - pyexasol>=0.5.1
   - pandas>=0.17.1
diff --git a/airflow/providers/facebook/provider.yaml b/airflow/providers/facebook/provider.yaml
index 8d18a2869f..39618f54fc 100644
--- a/airflow/providers/facebook/provider.yaml
+++ b/airflow/providers/facebook/provider.yaml
@@ -36,7 +36,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - facebook-business>=6.0.2
 
 integrations:
diff --git a/airflow/providers/github/provider.yaml b/airflow/providers/github/provider.yaml
index 10f5f2188c..119f8b4e95 100644
--- a/airflow/providers/github/provider.yaml
+++ b/airflow/providers/github/provider.yaml
@@ -23,7 +23,7 @@ description: |
     `GitHub <https://www.github.com/>`__
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - pygithub
 
 versions:
diff --git a/airflow/providers/google/cloud/links/base.py b/airflow/providers/google/cloud/links/base.py
index 9a7fdef882..6539043a86 100644
--- a/airflow/providers/google/cloud/links/base.py
+++ b/airflow/providers/google/cloud/links/base.py
@@ -17,12 +17,12 @@
 # under the License.
 from __future__ import annotations
 
-from datetime import datetime
 from typing import TYPE_CHECKING, ClassVar
 
 from airflow.models import BaseOperatorLink, XCom
 
 if TYPE_CHECKING:
+    from airflow.models import BaseOperator
     from airflow.models.taskinstance import TaskInstanceKey
 
 
@@ -38,20 +38,11 @@ class BaseGoogleLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            conf = XCom.get_value(key=self.key, ti_key=ti_key)
-        else:
-            assert dttm
-            conf = XCom.get_one(
-                key=self.key,
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-            )
+        conf = XCom.get_value(key=self.key, ti_key=ti_key)
         if not conf:
             return ""
         if self.format_str.startswith(BASE_LINK):
diff --git a/airflow/providers/google/cloud/links/dataproc.py b/airflow/providers/google/cloud/links/dataproc.py
index ec543bbde8..573621aa14 100644
--- a/airflow/providers/google/cloud/links/dataproc.py
+++ b/airflow/providers/google/cloud/links/dataproc.py
@@ -18,13 +18,13 @@
 """This module contains Google Dataproc links."""
 from __future__ import annotations
 
-from datetime import datetime
 from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperatorLink, XCom
 from airflow.providers.google.cloud.links.base import BASE_LINK
 
 if TYPE_CHECKING:
+    from airflow.models import BaseOperator
     from airflow.models.taskinstance import TaskInstanceKey
     from airflow.utils.context import Context
 
@@ -67,17 +67,11 @@ class DataprocLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            conf = XCom.get_value(key=self.key, ti_key=ti_key)
-        else:
-            assert dttm
-            conf = XCom.get_one(
-                key=self.key, dag_id=operator.dag.dag_id, task_id=operator.task_id, execution_date=dttm
-            )
+        conf = XCom.get_value(key=self.key, ti_key=ti_key)
         return (
             conf["url"].format(
                 region=conf["region"], project_id=conf["project_id"], resource=conf["resource"]
@@ -110,20 +104,11 @@ class DataprocListLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            list_conf = XCom.get_value(key=self.key, ti_key=ti_key)
-        else:
-            assert dttm
-            list_conf = XCom.get_one(
-                key=self.key,
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-            )
+        list_conf = XCom.get_value(key=self.key, ti_key=ti_key)
         return (
             list_conf["url"].format(
                 project_id=list_conf["project_id"],
diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py
index 9d9614e651..8db80d993a 100644
--- a/airflow/providers/google/cloud/operators/bigquery.py
+++ b/airflow/providers/google/cloud/operators/bigquery.py
@@ -21,7 +21,6 @@ from __future__ import annotations
 import enum
 import json
 import warnings
-from datetime import datetime
 from typing import TYPE_CHECKING, Any, Iterable, Optional, Sequence, SupportsAbs
 
 import attr
@@ -76,20 +75,11 @@ class BigQueryConsoleLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ):
-        if ti_key is not None:
-            job_id = XCom.get_value(key="job_id", ti_key=ti_key)
-        else:
-            assert dttm is not None
-            job_id = XCom.get_one(
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-                key="job_id",
-            )
+        job_id = XCom.get_value(key="job_id", ti_key=ti_key)
         return BIGQUERY_JOB_DETAILS_LINK_FMT.format(job_id=job_id) if job_id else ""
 
 
@@ -105,17 +95,11 @@ class BigQueryConsoleIndexableLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ):
-        if ti_key is not None:
-            job_ids = XCom.get_value(key="job_id", ti_key=ti_key)
-        else:
-            assert dttm is not None
-            job_ids = XCom.get_one(
-                key="job_id", dag_id=operator.dag.dag_id, task_id=operator.task_id, execution_date=dttm
-            )
+        job_ids = XCom.get_value(key="job_id", ti_key=ti_key)
         if not job_ids:
             return None
         if len(job_ids) < self.index:
diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py b/airflow/providers/google/cloud/operators/dataproc_metastore.py
index 452a3c23ff..6e6e9fcfe3 100644
--- a/airflow/providers/google/cloud/operators/dataproc_metastore.py
+++ b/airflow/providers/google/cloud/operators/dataproc_metastore.py
@@ -18,7 +18,6 @@
 """This module contains Google Dataproc Metastore operators."""
 from __future__ import annotations
 
-from datetime import datetime
 from time import sleep
 from typing import TYPE_CHECKING, Sequence
 
@@ -82,20 +81,11 @@ class DataprocMetastoreLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            conf = XCom.get_value(key=self.key, ti_key=ti_key)
-        else:
-            assert dttm
-            conf = XCom.get_one(
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-                key=self.key,
-            )
+        conf = XCom.get_value(key=self.key, ti_key=ti_key)
         return (
             conf["url"].format(
                 region=conf["region"],
@@ -136,20 +126,11 @@ class DataprocMetastoreDetailedLink(BaseOperatorLink):
 
     def get_link(
         self,
-        operator,
-        dttm: datetime | None = None,
-        ti_key: TaskInstanceKey | None = None,
+        operator: BaseOperator,
+        *,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            conf = XCom.get_value(key=self.key, ti_key=ti_key)
-        else:
-            assert dttm
-            conf = XCom.get_one(
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-                key=DataprocMetastoreDetailedLink.key,
-            )
+        conf = XCom.get_value(key=self.key, ti_key=ti_key)
         return (
             conf["url"].format(
                 region=conf["region"],
diff --git a/airflow/providers/google/cloud/triggers/cloud_composer.py b/airflow/providers/google/cloud/triggers/cloud_composer.py
index 4af241fcfb..9a745e7819 100644
--- a/airflow/providers/google/cloud/triggers/cloud_composer.py
+++ b/airflow/providers/google/cloud/triggers/cloud_composer.py
@@ -18,21 +18,11 @@
 from __future__ import annotations
 
 import asyncio
-import logging
 from typing import Any, Sequence
 
 from airflow import AirflowException
 from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerAsyncHook
-
-try:
-    from airflow.triggers.base import BaseTrigger, TriggerEvent
-except ImportError:
-    logging.getLogger(__name__).warning(
-        "Deferrable Operators only work starting Airflow 2.2",
-        exc_info=True,
-    )
-    BaseTrigger = object  # type: ignore
-    TriggerEvent = None  # type: ignore
+from airflow.triggers.base import BaseTrigger, TriggerEvent
 
 
 class CloudComposerExecutionTrigger(BaseTrigger):
diff --git a/airflow/providers/google/leveldb/hooks/leveldb.py b/airflow/providers/google/leveldb/hooks/leveldb.py
index e251ce3581..a60ba33ff0 100644
--- a/airflow/providers/google/leveldb/hooks/leveldb.py
+++ b/airflow/providers/google/leveldb/hooks/leveldb.py
@@ -17,23 +17,13 @@
 """Hook for Level DB"""
 from __future__ import annotations
 
+from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
+from airflow.hooks.base import BaseHook
+
 try:
     import plyvel
     from plyvel import DB
-
-    from airflow.exceptions import AirflowException
-    from airflow.hooks.base import BaseHook
-
 except ImportError as e:
-    # Plyvel is an optional feature and if imports are missing, it should be silently ignored
-    # As of Airflow 2.3  and above the operator can throw OptionalProviderFeatureException
-    try:
-        from airflow.exceptions import AirflowOptionalProviderFeatureException
-    except ImportError:
-        # However, in order to keep backwards-compatibility with Airflow 2.1 and 2.2, if the
-        # 2.3 exception cannot be imported, the original ImportError should be raised.
-        # This try/except can be removed when the provider depends on Airflow >= 2.3.0
-        raise e from None
     raise AirflowOptionalProviderFeatureException(e)
 
 DB_NOT_INITIALIZED_BEFORE = "The `get_conn` method should be called before!"
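
The optional-feature pattern that remains after this cleanup is the one sketched below: on Airflow 2.3+ the exception class always exists, so the nested fallback import (and re-raising the original ImportError for 2.2) is no longer needed.

    from airflow.exceptions import AirflowOptionalProviderFeatureException

    try:
        import plyvel  # optional dependency backing this hook
    except ImportError as e:
        # Signals "feature unavailable" instead of silently hiding the import error.
        raise AirflowOptionalProviderFeatureException(e)
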
diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml
index 970827ea2a..fb0a3d434c 100644
--- a/airflow/providers/google/provider.yaml
+++ b/airflow/providers/google/provider.yaml
@@ -54,7 +54,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   # Google has very clear rules on what dependencies should be used. All the limits below
   # follow strict guidelines of Google Libraries as quoted here:
diff --git a/airflow/providers/grpc/provider.yaml b/airflow/providers/grpc/provider.yaml
index 133aa0c334..80702e33a6 100644
--- a/airflow/providers/grpc/provider.yaml
+++ b/airflow/providers/grpc/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.1
   - 1.0.0
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   # Google has very clear rules on what dependencies should be used. All the limits below
   # follow strict guidelines of Google Libraries as quoted here:
   # While this issue is open, dependents of google-api-core, google-cloud-core. and google-auth
diff --git a/airflow/providers/hashicorp/provider.yaml b/airflow/providers/hashicorp/provider.yaml
index 9a78fa1d74..72af712266 100644
--- a/airflow/providers/hashicorp/provider.yaml
+++ b/airflow/providers/hashicorp/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - hvac>=0.10
 
 integrations:
diff --git a/airflow/providers/influxdb/provider.yaml b/airflow/providers/influxdb/provider.yaml
index 628beee808..27699e85dd 100644
--- a/airflow/providers/influxdb/provider.yaml
+++ b/airflow/providers/influxdb/provider.yaml
@@ -24,7 +24,7 @@ description: |
     `InfluxDB <https://www.influxdata.com/>`__
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - influxdb-client>=1.19.0
   - requests>=2.26.0
 
diff --git a/airflow/providers/jdbc/provider.yaml b/airflow/providers/jdbc/provider.yaml
index 30c564e100..84850d248b 100644
--- a/airflow/providers/jdbc/provider.yaml
+++ b/airflow/providers/jdbc/provider.yaml
@@ -36,7 +36,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - jaydebeapi>=1.1.1
 
diff --git a/airflow/providers/jenkins/provider.yaml b/airflow/providers/jenkins/provider.yaml
index 8f8068f576..a605c4d463 100644
--- a/airflow/providers/jenkins/provider.yaml
+++ b/airflow/providers/jenkins/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - python-jenkins>=1.0.0
 
 integrations:
diff --git a/airflow/providers/microsoft/azure/operators/data_factory.py b/airflow/providers/microsoft/azure/operators/data_factory.py
index da1c47d2d8..fa29d38a06 100644
--- a/airflow/providers/microsoft/azure/operators/data_factory.py
+++ b/airflow/providers/microsoft/azure/operators/data_factory.py
@@ -26,34 +26,28 @@ from airflow.providers.microsoft.azure.hooks.data_factory import (
     AzureDataFactoryPipelineRunStatus,
     get_field,
 )
+from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:
     from airflow.models.taskinstance import TaskInstanceKey
     from airflow.utils.context import Context
 
 
-class AzureDataFactoryPipelineRunLink(BaseOperatorLink):
+class AzureDataFactoryPipelineRunLink(LoggingMixin, BaseOperatorLink):
     """Constructs a link to monitor a pipeline run in Azure Data Factory."""
 
     name = "Monitor Pipeline Run"
 
     def get_link(
         self,
-        operator,
-        dttm=None,
+        operator: BaseOperator,
         *,
-        ti_key: TaskInstanceKey | None = None,
+        ti_key: TaskInstanceKey,
     ) -> str:
-        if ti_key is not None:
-            run_id = XCom.get_value(key="run_id", ti_key=ti_key)
-        else:
-            assert dttm
-            run_id = XCom.get_one(
-                key="run_id",
-                dag_id=operator.dag.dag_id,
-                task_id=operator.task_id,
-                execution_date=dttm,
-            )
+        if not isinstance(operator, AzureDataFactoryRunPipelineOperator):
+            self.log.info("The %s is not %s class.", operator.__class__, AzureDataFactoryRunPipelineOperator)
+            return ""
+        run_id = XCom.get_value(key="run_id", ti_key=ti_key)
         conn_id = operator.azure_data_factory_conn_id
         conn = BaseHook.get_connection(conn_id)
         extras = conn.extra_dejson
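
Beyond the signature change, this link now mixes in LoggingMixin and degrades to an empty URL when attached to an unexpected operator class. A hedged sketch of that guard pattern with hypothetical class names:

    from __future__ import annotations

    from airflow.models import BaseOperator, BaseOperatorLink, XCom
    from airflow.models.taskinstance import TaskInstanceKey
    from airflow.utils.log.logging_mixin import LoggingMixin


    class ExampleRunOperator(BaseOperator):
        """Hypothetical operator assumed to push a "run_id" XCom when it executes."""


    class GuardedRunLink(LoggingMixin, BaseOperatorLink):
        """Hypothetical link that only renders for ExampleRunOperator tasks."""

        name = "Monitor Run"

        def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey) -> str:
            # LoggingMixin supplies self.log, so the link can log and return an
            # empty URL instead of raising when attached to the wrong operator.
            if not isinstance(operator, ExampleRunOperator):
                self.log.info("No link for %s", type(operator).__name__)
                return ""
            run_id = XCom.get_value(key="run_id", ti_key=ti_key)
            return f"https://example.invalid/runs/{run_id}" if run_id else ""
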
diff --git a/airflow/providers/microsoft/azure/provider.yaml b/airflow/providers/microsoft/azure/provider.yaml
index e5f2cecd81..bfa336e006 100644
--- a/airflow/providers/microsoft/azure/provider.yaml
+++ b/airflow/providers/microsoft/azure/provider.yaml
@@ -45,7 +45,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - azure-batch>=8.0.0
   - azure-cosmos>=4.0.0
   - azure-datalake-store>=0.0.45
diff --git a/airflow/providers/microsoft/mssql/operators/mssql.py b/airflow/providers/microsoft/mssql/operators/mssql.py
index e8685e0924..1b7c47886c 100644
--- a/airflow/providers/microsoft/mssql/operators/mssql.py
+++ b/airflow/providers/microsoft/mssql/operators/mssql.py
@@ -21,7 +21,6 @@ import warnings
 from typing import Sequence
 
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
-from airflow.www import utils as wwwutils
 
 
 class MsSqlOperator(SQLExecuteQueryOperator):
@@ -47,8 +46,7 @@ class MsSqlOperator(SQLExecuteQueryOperator):
 
     template_fields: Sequence[str] = ("sql",)
     template_ext: Sequence[str] = (".sql",)
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
-    template_fields_renderers = {"sql": "tsql" if "tsql" in wwwutils.get_attr_renderer() else "sql"}
+    template_fields_renderers = {"sql": "tsql"}
     ui_color = "#ededed"
 
     def __init__(
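
The renderer fallback removed here (and in the MySQL and Postgres operators further down) existed only because the dialect-specific renderers may be missing before Airflow 2.3; on 2.3+ an operator can declare them directly. A small sketch, with a hypothetical operator class:

    from typing import Sequence

    from airflow.models import BaseOperator


    class ExampleTSqlOperator(BaseOperator):
        """Hypothetical operator relying on the renderer set available on 2.3+."""

        template_fields: Sequence[str] = ("sql",)
        template_ext: Sequence[str] = (".sql",)
        template_fields_renderers = {"sql": "tsql"}  # no get_attr_renderer() check needed
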
diff --git a/airflow/providers/microsoft/mssql/provider.yaml b/airflow/providers/microsoft/mssql/provider.yaml
index 9d3c9a4feb..86562830ea 100644
--- a/airflow/providers/microsoft/mssql/provider.yaml
+++ b/airflow/providers/microsoft/mssql/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - pymssql>=2.1.5; platform_machine != "aarch64"
 
diff --git a/airflow/providers/microsoft/psrp/operators/psrp.py b/airflow/providers/microsoft/psrp/operators/psrp.py
index 617f433530..733b8cb29f 100644
--- a/airflow/providers/microsoft/psrp/operators/psrp.py
+++ b/airflow/providers/microsoft/psrp/operators/psrp.py
@@ -28,12 +28,7 @@ from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.microsoft.psrp.hooks.psrp import PsrpHook
 from airflow.settings import json
-
-
-# TODO: Replace with airflow.utils.helpers.exactly_one in Airflow 2.3.
-def exactly_one(*args):
-    return len(set(filter(None, args))) == 1
-
+from airflow.utils.helpers import exactly_one
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
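
`airflow.utils.helpers.exactly_one`, available from 2.3, replaces the local backport; it reports whether exactly one of its arguments is truthy, which is typically used to validate mutually exclusive operator parameters. A small usage sketch:

    from airflow.utils.helpers import exactly_one

    # True only when exactly one argument is truthy.
    assert exactly_one("command", None, None)
    assert not exactly_one(None, None)
    assert not exactly_one("command", "powershell")
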
diff --git a/airflow/providers/microsoft/winrm/provider.yaml b/airflow/providers/microsoft/winrm/provider.yaml
index 96c37d0e41..fa2e895fbb 100644
--- a/airflow/providers/microsoft/winrm/provider.yaml
+++ b/airflow/providers/microsoft/winrm/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - pywinrm>=0.4
 
 integrations:
diff --git a/airflow/providers/mongo/provider.yaml b/airflow/providers/mongo/provider.yaml
index a7fdd51e64..530bc09763 100644
--- a/airflow/providers/mongo/provider.yaml
+++ b/airflow/providers/mongo/provider.yaml
@@ -34,7 +34,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - dnspython>=1.13.0
   # pymongo 4.0.0 removes connection option `ssl_cert_reqs` which is used in providers-mongo/2.2.0
   # TODO: Upgrade to pymongo 4.0.0+
diff --git a/airflow/providers/mysql/operators/mysql.py b/airflow/providers/mysql/operators/mysql.py
index 886a07b6b7..1609d09411 100644
--- a/airflow/providers/mysql/operators/mysql.py
+++ b/airflow/providers/mysql/operators/mysql.py
@@ -21,7 +21,6 @@ import warnings
 from typing import Sequence
 
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
-from airflow.www import utils as wwwutils
 
 
 class MySqlOperator(SQLExecuteQueryOperator):
@@ -46,9 +45,8 @@ class MySqlOperator(SQLExecuteQueryOperator):
     """
 
     template_fields: Sequence[str] = ("sql", "parameters")
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
     template_fields_renderers = {
-        "sql": "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql",
+        "sql": "mysql",
         "parameters": "json",
     }
     template_ext: Sequence[str] = (".sql", ".json")
diff --git a/airflow/providers/mysql/provider.yaml b/airflow/providers/mysql/provider.yaml
index 965234657b..7021768e0e 100644
--- a/airflow/providers/mysql/provider.yaml
+++ b/airflow/providers/mysql/provider.yaml
@@ -39,7 +39,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - mysql-connector-python>=8.0.11; platform_machine != "aarch64"
   - mysqlclient>=1.3.6; platform_machine != "aarch64"
diff --git a/airflow/providers/mysql/transfers/presto_to_mysql.py b/airflow/providers/mysql/transfers/presto_to_mysql.py
index 30e12684fd..b38e6b8654 100644
--- a/airflow/providers/mysql/transfers/presto_to_mysql.py
+++ b/airflow/providers/mysql/transfers/presto_to_mysql.py
@@ -22,7 +22,6 @@ from typing import TYPE_CHECKING, Sequence
 from airflow.models import BaseOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.providers.presto.hooks.presto import PrestoHook
-from airflow.www import utils as wwwutils
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -47,10 +46,9 @@ class PrestoToMySqlOperator(BaseOperator):
 
     template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator")
     template_ext: Sequence[str] = (".sql",)
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
     template_fields_renderers = {
         "sql": "sql",
-        "mysql_preoperator": "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql",
+        "mysql_preoperator": "mysql",
     }
     ui_color = "#a0e08c"
 
diff --git a/airflow/providers/mysql/transfers/trino_to_mysql.py b/airflow/providers/mysql/transfers/trino_to_mysql.py
index 5a1763d8ed..8ff5ed0446 100644
--- a/airflow/providers/mysql/transfers/trino_to_mysql.py
+++ b/airflow/providers/mysql/transfers/trino_to_mysql.py
@@ -22,7 +22,6 @@ from typing import TYPE_CHECKING, Sequence
 from airflow.models import BaseOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.providers.trino.hooks.trino import TrinoHook
-from airflow.www import utils as wwwutils
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -47,10 +46,9 @@ class TrinoToMySqlOperator(BaseOperator):
 
     template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator")
     template_ext: Sequence[str] = (".sql",)
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
     template_fields_renderers = {
         "sql": "sql",
-        "mysql_preoperator": "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql",
+        "mysql_preoperator": "mysql",
     }
     ui_color = "#a0e08c"
 
diff --git a/airflow/providers/mysql/transfers/vertica_to_mysql.py b/airflow/providers/mysql/transfers/vertica_to_mysql.py
index c9a4d17090..a7df1f029d 100644
--- a/airflow/providers/mysql/transfers/vertica_to_mysql.py
+++ b/airflow/providers/mysql/transfers/vertica_to_mysql.py
@@ -27,14 +27,10 @@ import unicodecsv as csv
 from airflow.models import BaseOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.providers.vertica.hooks.vertica import VerticaHook
-from airflow.www import utils as wwwutils
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
 
-# TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
-MYSQL_RENDERER = "mysql" if "mysql" in wwwutils.get_attr_renderer() else "sql"
-
 
 class VerticaToMySqlOperator(BaseOperator):
     """
@@ -62,8 +58,8 @@ class VerticaToMySqlOperator(BaseOperator):
     template_ext: Sequence[str] = (".sql",)
     template_fields_renderers = {
         "sql": "sql",
-        "mysql_preoperator": MYSQL_RENDERER,
-        "mysql_postoperator": MYSQL_RENDERER,
+        "mysql_preoperator": "mysql",
+        "mysql_postoperator": "mysql",
     }
     ui_color = "#a0e08c"
 
diff --git a/airflow/providers/neo4j/provider.yaml b/airflow/providers/neo4j/provider.yaml
index 9904d5193c..147429644c 100644
--- a/airflow/providers/neo4j/provider.yaml
+++ b/airflow/providers/neo4j/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - neo4j>=4.2.1
 
 integrations:
diff --git a/airflow/providers/odbc/provider.yaml b/airflow/providers/odbc/provider.yaml
index 69df89bb7a..c45b64facc 100644
--- a/airflow/providers/odbc/provider.yaml
+++ b/airflow/providers/odbc/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - pyodbc
 
diff --git a/airflow/providers/openfaas/provider.yaml b/airflow/providers/openfaas/provider.yaml
index 3962a571c9..ea056029c1 100644
--- a/airflow/providers/openfaas/provider.yaml
+++ b/airflow/providers/openfaas/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
 
 integrations:
   - integration-name: OpenFaaS
diff --git a/airflow/providers/opsgenie/provider.yaml b/airflow/providers/opsgenie/provider.yaml
index 255f8ecf46..06e6f354f1 100644
--- a/airflow/providers/opsgenie/provider.yaml
+++ b/airflow/providers/opsgenie/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - opsgenie-sdk>=2.1.5
 
 integrations:
diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py
index e8afaba443..0ba7425e16 100644
--- a/airflow/providers/oracle/hooks/oracle.py
+++ b/airflow/providers/oracle/hooks/oracle.py
@@ -99,6 +99,7 @@ class OracleHook(DbApiHook):
     conn_type = "oracle"
     hook_name = "Oracle"
 
+    _test_connection_sql = "select 1 from dual"
     supports_autocommit = True
 
     def __init__(
@@ -424,18 +425,3 @@ class OracleHook(DbApiHook):
         )
 
         return result
-
-    # TODO: Merge this implementation back to DbApiHook when dropping
-    # support for Airflow 2.2.
-    def test_connection(self):
-        """Tests the connection by executing a select 1 from dual query"""
-        status, message = False, ""
-        try:
-            if self.get_first("select 1 from dual"):
-                status = True
-                message = "Connection successfully tested"
-        except Exception as e:
-            status = False
-            message = str(e)
-
-        return status, message
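
The hooks touched by this change (Oracle here, Snowflake and Trino further down) drop their bespoke test_connection() overrides in favour of the `_test_connection_sql` attribute that the common-sql DbApiHook executes for them, which is also why their common-sql pin moves to >=1.3.0. A sketch of what a hook now declares; the hook name and connection attributes below are hypothetical:

    from airflow.providers.common.sql.hooks.sql import DbApiHook


    class ExampleDbHook(DbApiHook):
        """Hypothetical hook: the inherited test_connection() runs this statement."""

        conn_name_attr = "example_conn_id"
        default_conn_name = "example_default"
        hook_name = "Example"
        # Only needed when the backend cannot run the default "select 1",
        # e.g. Oracle requires "select 1 from dual".
        _test_connection_sql = "select 1 from dual"
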
diff --git a/airflow/providers/oracle/provider.yaml b/airflow/providers/oracle/provider.yaml
index 07419afcb5..21e713b895 100644
--- a/airflow/providers/oracle/provider.yaml
+++ b/airflow/providers/oracle/provider.yaml
@@ -39,7 +39,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - oracledb>=1.0.0
 
diff --git a/airflow/providers/pagerduty/provider.yaml b/airflow/providers/pagerduty/provider.yaml
index 537bca625c..b0c24d6ad6 100644
--- a/airflow/providers/pagerduty/provider.yaml
+++ b/airflow/providers/pagerduty/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - pdpyras>=4.1.2
 
 integrations:
diff --git a/airflow/providers/papermill/provider.yaml b/airflow/providers/papermill/provider.yaml
index fcc9a0543d..cafc3aa587 100644
--- a/airflow/providers/papermill/provider.yaml
+++ b/airflow/providers/papermill/provider.yaml
@@ -35,7 +35,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - papermill[all]>=1.2.1
   - scrapbook[all]
 
diff --git a/airflow/providers/plexus/provider.yaml b/airflow/providers/plexus/provider.yaml
index 1b9f5054ea..e48ed155ba 100644
--- a/airflow/providers/plexus/provider.yaml
+++ b/airflow/providers/plexus/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - arrow>=0.16.0
 
 integrations:
diff --git a/airflow/providers/postgres/operators/postgres.py b/airflow/providers/postgres/operators/postgres.py
index 10845f0e57..561d06c167 100644
--- a/airflow/providers/postgres/operators/postgres.py
+++ b/airflow/providers/postgres/operators/postgres.py
@@ -23,7 +23,6 @@ from typing import Mapping, Sequence
 from psycopg2.sql import SQL, Identifier
 
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
-from airflow.www import utils as wwwutils
 
 
 class PostgresOperator(SQLExecuteQueryOperator):
@@ -42,10 +41,7 @@ class PostgresOperator(SQLExecuteQueryOperator):
     """
 
     template_fields: Sequence[str] = ("sql",)
-    # TODO: Remove renderer check when the provider has an Airflow 2.3+ requirement.
-    template_fields_renderers = {
-        "sql": "postgresql" if "postgresql" in wwwutils.get_attr_renderer() else "sql"
-    }
+    template_fields_renderers = {"sql": "postgresql"}
     template_ext: Sequence[str] = (".sql",)
     ui_color = "#ededed"
 
diff --git a/airflow/providers/postgres/provider.yaml b/airflow/providers/postgres/provider.yaml
index ebf65ab7d2..19b21af4fe 100644
--- a/airflow/providers/postgres/provider.yaml
+++ b/airflow/providers/postgres/provider.yaml
@@ -41,7 +41,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - psycopg2>=2.8.0
 
diff --git a/airflow/providers/presto/hooks/presto.py b/airflow/providers/presto/hooks/presto.py
index 6a1d25407f..902ae67eac 100644
--- a/airflow/providers/presto/hooks/presto.py
+++ b/airflow/providers/presto/hooks/presto.py
@@ -48,14 +48,11 @@ def generate_presto_client_info() -> str:
         )
         for format_map in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values()
     }
-    # try_number isn't available in context for airflow < 2.2.5
-    # https://github.com/apache/airflow/issues/23059
-    try_number = context_var.get("try_number", "")
     task_info = {
         "dag_id": context_var["dag_id"],
         "task_id": context_var["task_id"],
         "execution_date": context_var["execution_date"],
-        "try_number": try_number,
+        "try_number": context_var["try_number"],
         "dag_run_id": context_var["dag_run_id"],
         "dag_owner": context_var["dag_owner"],
     }
diff --git a/airflow/providers/presto/provider.yaml b/airflow/providers/presto/provider.yaml
index 9bb9060248..22c7b8b7d0 100644
--- a/airflow/providers/presto/provider.yaml
+++ b/airflow/providers/presto/provider.yaml
@@ -38,7 +38,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - presto-python-client>=0.8.2
   - pandas>=0.17.1
diff --git a/airflow/providers/qubole/hooks/qubole.py b/airflow/providers/qubole/hooks/qubole.py
index 8101586d5f..7b64820397 100644
--- a/airflow/providers/qubole/hooks/qubole.py
+++ b/airflow/providers/qubole/hooks/qubole.py
@@ -229,9 +229,7 @@ class QuboleHook(BaseHook):
         """
         if fp is None:
             iso = datetime.datetime.utcnow().isoformat()
-            base_log_folder = conf.get("logging", "BASE_LOG_FOLDER")
-            if base_log_folder is None:
-                raise ValueError("logging/BASE_LOG_FOLDER config value should be set")
+            base_log_folder = conf.get_mandatory_value("logging", "BASE_LOG_FOLDER")
             logpath = os.path.expanduser(base_log_folder)
             resultpath = logpath + "/" + self.dag_id + "/" + self.task_id + "/results"
             pathlib.Path(resultpath).mkdir(parents=True, exist_ok=True)
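
`conf.get_mandatory_value()`, available on 2.3+, lets the hook drop the manual None check. A minimal sketch of the call:

    from airflow.configuration import conf

    # Raises instead of returning None when the option is unset, replacing the
    # explicit "if base_log_folder is None: raise ValueError(...)" dance above.
    base_log_folder = conf.get_mandatory_value("logging", "BASE_LOG_FOLDER")
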
diff --git a/airflow/providers/qubole/operators/qubole.py b/airflow/providers/qubole/operators/qubole.py
index c968657cf3..710387663f 100644
--- a/airflow/providers/qubole/operators/qubole.py
+++ b/airflow/providers/qubole/operators/qubole.py
@@ -19,7 +19,6 @@
 from __future__ import annotations
 
 import re
-from datetime import datetime
 from typing import TYPE_CHECKING, Sequence
 
 from airflow.hooks.base import BaseHook
@@ -33,6 +32,7 @@ from airflow.providers.qubole.hooks.qubole import (
 )
 
 if TYPE_CHECKING:
+
     from airflow.models.taskinstance import TaskInstanceKey
     from airflow.utils.context import Context
 
@@ -45,15 +45,13 @@ class QDSLink(BaseOperatorLink):
     def get_link(
         self,
         operator: BaseOperator,
-        dttm: datetime | None = None,
         *,
-        ti_key: TaskInstanceKey | None = None,
+        ti_key: TaskInstanceKey,
     ) -> str:
         """
         Get link to qubole command result page.
 
         :param operator: operator
-        :param dttm: datetime
         :return: url link
         """
         conn = BaseHook.get_connection(
@@ -64,13 +62,7 @@ class QDSLink(BaseOperatorLink):
             host = re.sub(r"api$", "v2/analyze?command_id=", conn.host)
         else:
             host = "https://api.qubole.com/v2/analyze?command_id="
-        if ti_key is not None:
-            qds_command_id = XCom.get_value(key="qbol_cmd_id", ti_key=ti_key)
-        else:
-            assert dttm
-            qds_command_id = XCom.get_one(
-                key="qbol_cmd_id", dag_id=operator.dag_id, task_id=operator.task_id, execution_date=dttm
-            )
+        qds_command_id = XCom.get_value(key="qbol_cmd_id", ti_key=ti_key)
         url = host + str(qds_command_id) if qds_command_id else ""
         return url
 
diff --git a/airflow/providers/qubole/provider.yaml b/airflow/providers/qubole/provider.yaml
index f4f31c0c4e..6cfaec23ba 100644
--- a/airflow/providers/qubole/provider.yaml
+++ b/airflow/providers/qubole/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - qds-sdk>=1.10.4
 
diff --git a/airflow/providers/redis/provider.yaml b/airflow/providers/redis/provider.yaml
index 15c353d8d8..5b5ae05867 100644
--- a/airflow/providers/redis/provider.yaml
+++ b/airflow/providers/redis/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   # Redis 4 introduced a number of changes that likely need testing including mixins in redis commands
   # as well as unquoting URLS with `urllib.parse.unquote`:
   # https://github.com/redis/redis-py/blob/master/CHANGES
diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/salesforce/provider.yaml
index 2c31a38210..406fbc02fb 100644
--- a/airflow/providers/salesforce/provider.yaml
+++ b/airflow/providers/salesforce/provider.yaml
@@ -39,7 +39,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - simple-salesforce>=1.0.0
   - pandas>=0.17.1
 
diff --git a/airflow/providers/samba/provider.yaml b/airflow/providers/samba/provider.yaml
index dd9c8674ca..41cab949bb 100644
--- a/airflow/providers/samba/provider.yaml
+++ b/airflow/providers/samba/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - smbprotocol>=1.5.0
 
 integrations:
diff --git a/airflow/providers/segment/provider.yaml b/airflow/providers/segment/provider.yaml
index 0cede0faa0..2233f743cf 100644
--- a/airflow/providers/segment/provider.yaml
+++ b/airflow/providers/segment/provider.yaml
@@ -32,7 +32,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - analytics-python>=1.2.9
 
 integrations:
diff --git a/airflow/providers/sendgrid/provider.yaml b/airflow/providers/sendgrid/provider.yaml
index 8c8fe945b0..62148897c4 100644
--- a/airflow/providers/sendgrid/provider.yaml
+++ b/airflow/providers/sendgrid/provider.yaml
@@ -22,7 +22,7 @@ description: |
     `Sendgrid <https://sendgrid.com/>`__
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - sendgrid>=6.0.0
 
 versions:
diff --git a/airflow/providers/sftp/provider.yaml b/airflow/providers/sftp/provider.yaml
index de703fad14..d04a842b69 100644
--- a/airflow/providers/sftp/provider.yaml
+++ b/airflow/providers/sftp/provider.yaml
@@ -42,7 +42,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-ssh>=2.1.0
 
 integrations:
diff --git a/airflow/providers/singularity/provider.yaml b/airflow/providers/singularity/provider.yaml
index 800bc02653..0aaae5bff2 100644
--- a/airflow/providers/singularity/provider.yaml
+++ b/airflow/providers/singularity/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - spython>=0.0.56
 
 integrations:
diff --git a/airflow/providers/slack/provider.yaml b/airflow/providers/slack/provider.yaml
index 80b858ebc9..34be15a996 100644
--- a/airflow/providers/slack/provider.yaml
+++ b/airflow/providers/slack/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.2.0
   - slack_sdk>=3.0.0
 
diff --git a/airflow/providers/slack/transfers/sql_to_slack.py b/airflow/providers/slack/transfers/sql_to_slack.py
index 00741d7ca0..bdd1cddd2b 100644
--- a/airflow/providers/slack/transfers/sql_to_slack.py
+++ b/airflow/providers/slack/transfers/sql_to_slack.py
@@ -18,16 +18,14 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
 
-from packaging.version import Version
 from pandas import DataFrame
 from tabulate import tabulate
 
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import BaseOperator
-from airflow.providers.common.sql.hooks.sql import DbApiHook, _backported_get_hook
+from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook
-from airflow.version import version
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -102,13 +100,7 @@ class SqlToSlackOperator(BaseOperator):
     def _get_hook(self) -> DbApiHook:
         self.log.debug("Get connection for %s", self.sql_conn_id)
         conn = BaseHook.get_connection(self.sql_conn_id)
-        if Version(version) >= Version("2.3"):
-            # "hook_params" were introduced to into "get_hook()" only in Airflow 2.3.
-            hook = conn.get_hook(hook_params=self.sql_hook_params)  # ignore airflow compat check
-        else:
-            # For supporting Airflow versions < 2.3, we backport "get_hook()" method. This should be removed
-            # when "apache-airflow-providers-slack" will depend on Airflow >= 2.3.
-            hook = _backported_get_hook(conn, hook_params=self.sql_hook_params)
+        hook = conn.get_hook(hook_params=self.sql_hook_params)
         if not callable(getattr(hook, "get_pandas_df", None)):
             raise AirflowException(
                 "This hook is not supported. The hook class must have get_pandas_df method."
diff --git a/airflow/providers/slack/utils/__init__.py b/airflow/providers/slack/utils/__init__.py
index f71901ff84..dda12656d4 100644
--- a/airflow/providers/slack/utils/__init__.py
+++ b/airflow/providers/slack/utils/__init__.py
@@ -19,14 +19,7 @@ from __future__ import annotations
 import warnings
 from typing import Any
 
-try:
-    from airflow.utils.types import NOTSET
-except ImportError:  # TODO: Remove when the provider has an Airflow 2.3+ requirement.
-
-    class ArgNotSet:
-        """Sentinel type for annotations, useful when None is not viable."""
-
-    NOTSET = ArgNotSet()  # type: ignore[assignment]
+from airflow.utils.types import NOTSET
 
 
 class ConnectionExtraConfig:
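
`NOTSET` (an instance of `ArgNotSet`) now comes straight from `airflow.utils.types` instead of the local fallback class. A small sketch of the sentinel's usual role, with a hypothetical function:

    from __future__ import annotations

    from airflow.utils.types import NOTSET, ArgNotSet


    def resolve_timeout(timeout: int | ArgNotSet = NOTSET) -> int:
        # NOTSET distinguishes "argument not supplied" from an explicit None or 0.
        return 30 if isinstance(timeout, ArgNotSet) else timeout
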
diff --git a/airflow/providers/snowflake/hooks/snowflake.py b/airflow/providers/snowflake/hooks/snowflake.py
index c4677d6f47..138025a455 100644
--- a/airflow/providers/snowflake/hooks/snowflake.py
+++ b/airflow/providers/snowflake/hooks/snowflake.py
@@ -112,6 +112,7 @@ class SnowflakeHook(DbApiHook):
     conn_type = "snowflake"
     hook_name = "Snowflake"
     supports_autocommit = True
+    _test_connection_sql = "select 1"
 
     @staticmethod
     def get_connection_form_widgets() -> dict[str, Any]:
@@ -390,11 +391,3 @@ class SnowflakeHook(DbApiHook):
             return results[-1]
         else:
             return results
-
-    def test_connection(self):
-        """Test the Snowflake connection by running a simple query."""
-        try:
-            self.run(sql="select 1")
-        except Exception as e:
-            return False, str(e)
-        return True, "Connection successfully tested"
diff --git a/airflow/providers/snowflake/provider.yaml b/airflow/providers/snowflake/provider.yaml
index 98f640a20e..c77ee86c71 100644
--- a/airflow/providers/snowflake/provider.yaml
+++ b/airflow/providers/snowflake/provider.yaml
@@ -45,7 +45,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - snowflake-connector-python>=2.4.1
   - snowflake-sqlalchemy>=1.1.0
diff --git a/airflow/providers/ssh/provider.yaml b/airflow/providers/ssh/provider.yaml
index 22e86dcc2e..76722dd1a7 100644
--- a/airflow/providers/ssh/provider.yaml
+++ b/airflow/providers/ssh/provider.yaml
@@ -41,7 +41,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - paramiko>=2.6.0
   - sshtunnel>=0.3.2
 
diff --git a/airflow/providers/tableau/provider.yaml b/airflow/providers/tableau/provider.yaml
index 75e2fbe924..565d821035 100644
--- a/airflow/providers/tableau/provider.yaml
+++ b/airflow/providers/tableau/provider.yaml
@@ -37,7 +37,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - tableauserverclient
 
 integrations:
diff --git a/airflow/providers/tabular/provider.yaml b/airflow/providers/tabular/provider.yaml
index 65f567bc40..360ca5bfd1 100644
--- a/airflow/providers/tabular/provider.yaml
+++ b/airflow/providers/tabular/provider.yaml
@@ -26,7 +26,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
 
 integrations:
   - integration-name: Tabular
diff --git a/airflow/providers/telegram/provider.yaml b/airflow/providers/telegram/provider.yaml
index e8097501ff..37652639ef 100644
--- a/airflow/providers/telegram/provider.yaml
+++ b/airflow/providers/telegram/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - python-telegram-bot>=13.0
 
 integrations:
diff --git a/airflow/providers/trino/hooks/trino.py b/airflow/providers/trino/hooks/trino.py
index 7b96373484..629ea2bf57 100644
--- a/airflow/providers/trino/hooks/trino.py
+++ b/airflow/providers/trino/hooks/trino.py
@@ -19,7 +19,6 @@ from __future__ import annotations
 
 import json
 import os
-from contextlib import closing
 from typing import Any, Callable, Iterable, Mapping
 
 import trino
@@ -49,14 +48,11 @@ def generate_trino_client_info() -> str:
         )
         for format_map in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values()
     }
-    # try_number isn't available in context for airflow < 2.2.5
-    # https://github.com/apache/airflow/issues/23059
-    try_number = context_var.get("try_number", "")
     task_info = {
         "dag_id": context_var["dag_id"],
         "task_id": context_var["task_id"],
         "execution_date": context_var["execution_date"],
-        "try_number": try_number,
+        "try_number": context_var["try_number"],
         "dag_run_id": context_var["dag_run_id"],
         "dag_owner": context_var["dag_owner"],
     }
@@ -94,6 +90,7 @@ class TrinoHook(DbApiHook):
     hook_name = "Trino"
     query_id = ""
     placeholder = "?"
+    _test_connection_sql = "select 1"
 
     def get_conn(self) -> Connection:
         """Returns a connection object"""
@@ -243,19 +240,3 @@ class TrinoHook(DbApiHook):
             commit_every = 0
 
         super().insert_rows(table, rows, target_fields, commit_every, replace)
-
-    def test_connection(self):
-        """Tests the connection from UI using Trino specific query"""
-        status, message = False, ""
-        try:
-            with closing(self.get_conn()) as conn:
-                with closing(conn.cursor()) as cur:
-                    cur.execute("select 1")
-                    if cur.fetchone():
-                        status = True
-                        message = "Connection successfully tested"
-        except Exception as e:
-            status = False
-            message = str(e)
-
-        return status, message
diff --git a/airflow/providers/trino/provider.yaml b/airflow/providers/trino/provider.yaml
index 57da2ba254..422ddac7a6 100644
--- a/airflow/providers/trino/provider.yaml
+++ b/airflow/providers/trino/provider.yaml
@@ -38,7 +38,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - pandas>=0.17.1
   - trino>=0.301.0
diff --git a/airflow/providers/vertica/provider.yaml b/airflow/providers/vertica/provider.yaml
index ccadf7b385..8127122b11 100644
--- a/airflow/providers/vertica/provider.yaml
+++ b/airflow/providers/vertica/provider.yaml
@@ -36,7 +36,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - apache-airflow-providers-common-sql>=1.3.0
   - vertica-python>=0.5.1
 
diff --git a/airflow/providers/yandex/provider.yaml b/airflow/providers/yandex/provider.yaml
index d7f5fe8697..af0d1856a6 100644
--- a/airflow/providers/yandex/provider.yaml
+++ b/airflow/providers/yandex/provider.yaml
@@ -34,7 +34,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - yandexcloud>=0.173.0
 
 integrations:
diff --git a/airflow/providers/zendesk/provider.yaml b/airflow/providers/zendesk/provider.yaml
index 6dc1cf264d..0276af44cf 100644
--- a/airflow/providers/zendesk/provider.yaml
+++ b/airflow/providers/zendesk/provider.yaml
@@ -33,7 +33,7 @@ versions:
   - 1.0.0
 
 dependencies:
-  - apache-airflow>=2.2.0
+  - apache-airflow>=2.3.0
   - zenpy>=2.0.24
 
 integrations:
diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
index 9e4b5b5bf1..b64d92b5c4 100644
--- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py
+++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
@@ -25,8 +25,8 @@ PRE_COMMIT_LIST = [
     "all",
     "black",
     "blacken-docs",
-    "check-airflow-2-2-compatibility",
     "check-airflow-config-yaml-consistent",
+    "check-airflow-provider-compatibility",
     "check-apache-license-rat",
     "check-base-operator-partial-arguments",
     "check-base-operator-usage",
diff --git a/docs/apache-airflow-providers/howto/create-update-providers.rst b/docs/apache-airflow-providers/howto/create-update-providers.rst
index 4b80923986..2a06c9a9fa 100644
--- a/docs/apache-airflow-providers/howto/create-update-providers.rst
+++ b/docs/apache-airflow-providers/howto/create-update-providers.rst
@@ -327,7 +327,7 @@ this (note the ``if ti_key is not None:`` condition).
 
     def get_link(
         self,
-        operator,
+        operator: BaseOperator,
         dttm: Optional[datetime] = None,
         ti_key: Optional["TaskInstanceKey"] = None,
     ):
diff --git a/docs/apache-airflow/howto/define_extra_link.rst b/docs/apache-airflow/howto/define_extra_link.rst
index 9bae547492..1436c2ece7 100644
--- a/docs/apache-airflow/howto/define_extra_link.rst
+++ b/docs/apache-airflow/howto/define_extra_link.rst
@@ -34,7 +34,7 @@ The following code shows how to add extra links to an operator via Plugins:
     class GoogleLink(BaseOperatorLink):
         name = "Google"
 
-        def get_link(self, operator, *, ti_key):
+        def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey):
             return "https://www.google.com"
 
 
@@ -92,7 +92,7 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope
       # Example: operators = [GCSToS3Operator, GCSToBigQueryOperator]
       operators = [GCSToS3Operator]
 
-      def get_link(self, operator, *, ti_key):
+      def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey):
           return "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format(
               dag_id=operator.dag_id,
               task_id=operator.task_id,
@@ -134,7 +134,7 @@ Console, but if we wanted to change that link we could:
         name = "BigQuery Console"
         operators = [BigQueryOperator]
 
-        def get_link(self, operator, *, ti_key):
+        def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey):
             job_id = XCom.get_one(ti_key=ti_key, key="job_id")
             return BIGQUERY_JOB_DETAILS_LINK_FMT.format(job_id=job_id) if job_id else ""
 
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index 74fa32863e..0be78fbc75 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -2,7 +2,7 @@
   "airbyte": {
     "deps": [
       "apache-airflow-providers-http",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": [
       "http"
@@ -10,7 +10,7 @@
   },
   "alibaba": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "oss2>=2.14.0"
     ],
     "cross-providers-deps": []
@@ -18,7 +18,7 @@
   "amazon": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "boto3>=1.15.0",
       "jsonpath_ng>=1.5.3",
       "mypy-boto3-appflow>=1.21.0",
@@ -44,7 +44,7 @@
   },
   "apache.beam": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "apache-beam>=2.39.0"
     ],
     "cross-providers-deps": [
@@ -53,7 +53,7 @@
   },
   "apache.cassandra": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "cassandra-driver>=3.13.0"
     ],
     "cross-providers-deps": []
@@ -61,7 +61,7 @@
   "apache.drill": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "sqlalchemy-drill>=1.1.0"
     ],
     "cross-providers-deps": [
@@ -71,7 +71,7 @@
   "apache.druid": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pydruid>=0.4.1"
     ],
     "cross-providers-deps": [
@@ -81,7 +81,7 @@
   },
   "apache.hdfs": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "hdfs[avro,dataframe,kerberos]>=2.0.4",
       "snakebite-py3"
     ],
@@ -90,7 +90,7 @@
   "apache.hive": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "hmsclient>=0.1.0",
       "pandas>=0.17.1",
       "pyhive[hive]>=0.6.0",
@@ -109,7 +109,7 @@
   },
   "apache.kylin": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "kylinpy>=2.6"
     ],
     "cross-providers-deps": []
@@ -117,7 +117,7 @@
   "apache.livy": {
     "deps": [
       "apache-airflow-providers-http",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": [
       "http"
@@ -125,14 +125,14 @@
   },
   "apache.pig": {
     "deps": [
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": []
   },
   "apache.pinot": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pinotdb>0.4.7"
     ],
     "cross-providers-deps": [
@@ -141,27 +141,27 @@
   },
   "apache.spark": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pyspark"
     ],
     "cross-providers-deps": []
   },
   "apache.sqoop": {
     "deps": [
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": []
   },
   "arangodb": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "python-arango>=7.3.2"
     ],
     "cross-providers-deps": []
   },
   "asana": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "asana>=0.10"
     ],
     "cross-providers-deps": []
@@ -169,13 +169,13 @@
   "atlassian.jira": {
     "deps": [
       "JIRA>1.0.7",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": []
   },
   "celery": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "celery>=5.2.3,<6",
       "flower>=1.0.0"
     ],
@@ -183,7 +183,7 @@
   },
   "cloudant": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "cloudant>=2.0"
     ],
     "cross-providers-deps": []
@@ -206,7 +206,7 @@
     "deps": [
       "aiohttp>=3.6.3, <4",
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "databricks-sql-connector>=2.0.0, <3.0.0",
       "requests>=2.27,<3"
     ],
@@ -216,7 +216,7 @@
   },
   "datadog": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "datadog>=0.14.0"
     ],
     "cross-providers-deps": []
@@ -224,7 +224,7 @@
   "dbt.cloud": {
     "deps": [
       "apache-airflow-providers-http",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": [
       "http"
@@ -233,7 +233,7 @@
   "dingding": {
     "deps": [
       "apache-airflow-providers-http",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": [
       "http"
@@ -242,7 +242,7 @@
   "discord": {
     "deps": [
       "apache-airflow-providers-http",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": [
       "http"
@@ -258,7 +258,7 @@
   "elasticsearch": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "elasticsearch-dbapi",
       "elasticsearch-dsl>=5.0.0",
       "elasticsearch>7"
@@ -270,7 +270,7 @@
   "exasol": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pandas>=0.17.1",
       "pyexasol>=0.5.1"
     ],
@@ -280,7 +280,7 @@
   },
   "facebook": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "facebook-business>=6.0.2"
     ],
     "cross-providers-deps": []
@@ -291,7 +291,7 @@
   },
   "github": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pygithub"
     ],
     "cross-providers-deps": []
@@ -300,7 +300,7 @@
     "deps": [
       "PyOpenSSL",
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "asgiref>=3.5.2",
       "gcloud-aio-bigquery>=6.1.2",
       "gcloud-aio-storage",
@@ -371,7 +371,7 @@
   },
   "grpc": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "google-auth-httplib2>=0.0.1",
       "google-auth>=1.0.0, <3.0.0",
       "grpcio>=1.15.0"
@@ -380,7 +380,7 @@
   },
   "hashicorp": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "hvac>=0.10"
     ],
     "cross-providers-deps": [
@@ -400,7 +400,7 @@
   },
   "influxdb": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "influxdb-client>=1.19.0",
       "requests>=2.26.0"
     ],
@@ -409,7 +409,7 @@
   "jdbc": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "jaydebeapi>=1.1.1"
     ],
     "cross-providers-deps": [
@@ -418,14 +418,14 @@
   },
   "jenkins": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "python-jenkins>=1.0.0"
     ],
     "cross-providers-deps": []
   },
   "microsoft.azure": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "azure-batch>=8.0.0",
       "azure-cosmos>=4.0.0",
       "azure-datalake-store>=0.0.45",
@@ -451,7 +451,7 @@
   "microsoft.mssql": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pymssql>=2.1.5; platform_machine != \"aarch64\""
     ],
     "cross-providers-deps": [
@@ -466,14 +466,14 @@
   },
   "microsoft.winrm": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pywinrm>=0.4"
     ],
     "cross-providers-deps": []
   },
   "mongo": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "dnspython>=1.13.0",
       "pymongo>=3.6.0,<4.0.0"
     ],
@@ -482,7 +482,7 @@
   "mysql": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "mysql-connector-python>=8.0.11; platform_machine != \"aarch64\"",
       "mysqlclient>=1.3.6; platform_machine != \"aarch64\""
     ],
@@ -496,7 +496,7 @@
   },
   "neo4j": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "neo4j>=4.2.1"
     ],
     "cross-providers-deps": []
@@ -504,7 +504,7 @@
   "odbc": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pyodbc"
     ],
     "cross-providers-deps": [
@@ -513,13 +513,13 @@
   },
   "openfaas": {
     "deps": [
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": []
   },
   "opsgenie": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "opsgenie-sdk>=2.1.5"
     ],
     "cross-providers-deps": []
@@ -527,7 +527,7 @@
   "oracle": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "oracledb>=1.0.0"
     ],
     "cross-providers-deps": [
@@ -536,14 +536,14 @@
   },
   "pagerduty": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pdpyras>=4.1.2"
     ],
     "cross-providers-deps": []
   },
   "papermill": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "papermill[all]>=1.2.1",
       "scrapbook[all]"
     ],
@@ -551,7 +551,7 @@
   },
   "plexus": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "arrow>=0.16.0"
     ],
     "cross-providers-deps": []
@@ -559,7 +559,7 @@
   "postgres": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "psycopg2>=2.8.0"
     ],
     "cross-providers-deps": [
@@ -570,7 +570,7 @@
   "presto": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pandas>=0.17.1",
       "presto-python-client>=0.8.2"
     ],
@@ -582,7 +582,7 @@
   "qubole": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "qds-sdk>=1.10.4"
     ],
     "cross-providers-deps": [
@@ -591,14 +591,14 @@
   },
   "redis": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "redis~=3.2"
     ],
     "cross-providers-deps": []
   },
   "salesforce": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pandas>=0.17.1",
       "simple-salesforce>=1.0.0"
     ],
@@ -606,7 +606,7 @@
   },
   "samba": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "smbprotocol>=1.5.0"
     ],
     "cross-providers-deps": []
@@ -614,13 +614,13 @@
   "segment": {
     "deps": [
       "analytics-python>=1.2.9",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": []
   },
   "sendgrid": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "sendgrid>=6.0.0"
     ],
     "cross-providers-deps": []
@@ -628,7 +628,7 @@
   "sftp": {
     "deps": [
       "apache-airflow-providers-ssh>=2.1.0",
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": [
       "ssh"
@@ -636,7 +636,7 @@
   },
   "singularity": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "spython>=0.0.56"
     ],
     "cross-providers-deps": []
@@ -644,7 +644,7 @@
   "slack": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.2.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "slack_sdk>=3.0.0"
     ],
     "cross-providers-deps": [
@@ -654,7 +654,7 @@
   "snowflake": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "snowflake-connector-python>=2.4.1",
       "snowflake-sqlalchemy>=1.1.0"
     ],
@@ -673,7 +673,7 @@
   },
   "ssh": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "paramiko>=2.6.0",
       "sshtunnel>=0.3.2"
     ],
@@ -681,20 +681,20 @@
   },
   "tableau": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "tableauserverclient"
     ],
     "cross-providers-deps": []
   },
   "tabular": {
     "deps": [
-      "apache-airflow>=2.2.0"
+      "apache-airflow>=2.3.0"
     ],
     "cross-providers-deps": []
   },
   "telegram": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "python-telegram-bot>=13.0"
     ],
     "cross-providers-deps": []
@@ -702,7 +702,7 @@
   "trino": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "pandas>=0.17.1",
       "trino>=0.301.0"
     ],
@@ -714,7 +714,7 @@
   "vertica": {
     "deps": [
       "apache-airflow-providers-common-sql>=1.3.0",
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "vertica-python>=0.5.1"
     ],
     "cross-providers-deps": [
@@ -723,14 +723,14 @@
   },
   "yandex": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "yandexcloud>=0.173.0"
     ],
     "cross-providers-deps": []
   },
   "zendesk": {
     "deps": [
-      "apache-airflow>=2.2.0",
+      "apache-airflow>=2.3.0",
       "zenpy>=2.0.24"
     ],
     "cross-providers-deps": []
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index c53adc58ab..83e27cb53a 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -50,7 +50,7 @@ setup:version:d11da4c17a23179830079b646160149c
 setup:a6bccf7a73bfac49ce5b027c8900ea61
 shell:557ee58e7c70af052f5ea7a173b39137
 start-airflow:b089191910e9c3f2ffd9c0079cdf38c6
-static-checks:425cd78507278494e345fb7648260c24
+static-checks:8acd42315c78e91537634a54095ee3e6
 stop:8ebd8a42f1003495d37b884de5ac7ce6
 testing:docker-compose-tests:70c744105ff61025f25d93a2f1f427c2
 testing:helm-tests:35f7ecef86fd9c9dbad73f20ebd64496
diff --git a/images/breeze/output_static-checks.svg b/images/breeze/output_static-checks.svg
index cd1b21ae68..9ac481f8d6 100644
--- a/images/breeze/output_static-checks.svg
+++ b/images/breeze/output_static-checks.svg
@@ -19,249 +19,249 @@
         font-weight: 700;
     }
 
-    .terminal-1502996596-matrix {
+    .terminal-2065099438-matrix {
         font-family: Fira Code, monospace;
         font-size: 20px;
         line-height: 24.4px;
         font-variant-east-asian: full-width;
     }
 
-    .terminal-1502996596-title {
+    .terminal-2065099438-title {
         font-size: 18px;
         font-weight: bold;
         font-family: arial;
     }
 
-    .terminal-1502996596-r1 { fill: #c5c8c6;font-weight: bold }
-.terminal-1502996596-r2 { fill: #c5c8c6 }
-.terminal-1502996596-r3 { fill: #d0b344;font-weight: bold }
-.terminal-1502996596-r4 { fill: #868887 }
-.terminal-1502996596-r5 { fill: #68a0b3;font-weight: bold }
-.terminal-1502996596-r6 { fill: #98a84b;font-weight: bold }
-.terminal-1502996596-r7 { fill: #8d7b39 }
+    .terminal-2065099438-r1 { fill: #c5c8c6;font-weight: bold }
+.terminal-2065099438-r2 { fill: #c5c8c6 }
+.terminal-2065099438-r3 { fill: #d0b344;font-weight: bold }
+.terminal-2065099438-r4 { fill: #868887 }
+.terminal-2065099438-r5 { fill: #68a0b3;font-weight: bold }
+.terminal-2065099438-r6 { fill: #98a84b;font-weight: bold }
+.terminal-2065099438-r7 { fill: #8d7b39 }
     </style>
 
     <defs>
-    <clipPath id="terminal-1502996596-clip-terminal">
+    <clipPath id="terminal-2065099438-clip-terminal">
       <rect x="0" y="0" width="1463.0" height="1267.8" />
     </clipPath>
-    <clipPath id="terminal-1502996596-line-0">
+    <clipPath id="terminal-2065099438-line-0">
     <rect x="0" y="1.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-1">
+<clipPath id="terminal-2065099438-line-1">
     <rect x="0" y="25.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-2">
+<clipPath id="terminal-2065099438-line-2">
     <rect x="0" y="50.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-3">
+<clipPath id="terminal-2065099438-line-3">
     <rect x="0" y="74.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-4">
+<clipPath id="terminal-2065099438-line-4">
     <rect x="0" y="99.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-5">
+<clipPath id="terminal-2065099438-line-5">
     <rect x="0" y="123.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-6">
+<clipPath id="terminal-2065099438-line-6">
     <rect x="0" y="147.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-7">
+<clipPath id="terminal-2065099438-line-7">
     <rect x="0" y="172.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-8">
+<clipPath id="terminal-2065099438-line-8">
     <rect x="0" y="196.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-9">
+<clipPath id="terminal-2065099438-line-9">
     <rect x="0" y="221.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-10">
+<clipPath id="terminal-2065099438-line-10">
     <rect x="0" y="245.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-11">
+<clipPath id="terminal-2065099438-line-11">
     <rect x="0" y="269.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-12">
+<clipPath id="terminal-2065099438-line-12">
     <rect x="0" y="294.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-13">
+<clipPath id="terminal-2065099438-line-13">
     <rect x="0" y="318.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-14">
+<clipPath id="terminal-2065099438-line-14">
     <rect x="0" y="343.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-15">
+<clipPath id="terminal-2065099438-line-15">
     <rect x="0" y="367.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-16">
+<clipPath id="terminal-2065099438-line-16">
     <rect x="0" y="391.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-17">
+<clipPath id="terminal-2065099438-line-17">
     <rect x="0" y="416.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-18">
+<clipPath id="terminal-2065099438-line-18">
     <rect x="0" y="440.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-19">
+<clipPath id="terminal-2065099438-line-19">
     <rect x="0" y="465.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-20">
+<clipPath id="terminal-2065099438-line-20">
     <rect x="0" y="489.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-21">
+<clipPath id="terminal-2065099438-line-21">
     <rect x="0" y="513.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-22">
+<clipPath id="terminal-2065099438-line-22">
     <rect x="0" y="538.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-23">
+<clipPath id="terminal-2065099438-line-23">
     <rect x="0" y="562.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-24">
+<clipPath id="terminal-2065099438-line-24">
     <rect x="0" y="587.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-25">
+<clipPath id="terminal-2065099438-line-25">
     <rect x="0" y="611.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-26">
+<clipPath id="terminal-2065099438-line-26">
     <rect x="0" y="635.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-27">
+<clipPath id="terminal-2065099438-line-27">
     <rect x="0" y="660.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-28">
+<clipPath id="terminal-2065099438-line-28">
     <rect x="0" y="684.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-29">
+<clipPath id="terminal-2065099438-line-29">
     <rect x="0" y="709.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-30">
+<clipPath id="terminal-2065099438-line-30">
     <rect x="0" y="733.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-31">
+<clipPath id="terminal-2065099438-line-31">
     <rect x="0" y="757.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-32">
+<clipPath id="terminal-2065099438-line-32">
     <rect x="0" y="782.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-33">
+<clipPath id="terminal-2065099438-line-33">
     <rect x="0" y="806.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-34">
+<clipPath id="terminal-2065099438-line-34">
     <rect x="0" y="831.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-35">
+<clipPath id="terminal-2065099438-line-35">
     <rect x="0" y="855.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-36">
+<clipPath id="terminal-2065099438-line-36">
     <rect x="0" y="879.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-37">
+<clipPath id="terminal-2065099438-line-37">
     <rect x="0" y="904.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-38">
+<clipPath id="terminal-2065099438-line-38">
     <rect x="0" y="928.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-39">
+<clipPath id="terminal-2065099438-line-39">
     <rect x="0" y="953.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-40">
+<clipPath id="terminal-2065099438-line-40">
     <rect x="0" y="977.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-41">
+<clipPath id="terminal-2065099438-line-41">
     <rect x="0" y="1001.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-42">
+<clipPath id="terminal-2065099438-line-42">
     <rect x="0" y="1026.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-43">
+<clipPath id="terminal-2065099438-line-43">
     <rect x="0" y="1050.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-44">
+<clipPath id="terminal-2065099438-line-44">
     <rect x="0" y="1075.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-45">
+<clipPath id="terminal-2065099438-line-45">
     <rect x="0" y="1099.5" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-46">
+<clipPath id="terminal-2065099438-line-46">
     <rect x="0" y="1123.9" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-47">
+<clipPath id="terminal-2065099438-line-47">
     <rect x="0" y="1148.3" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-48">
+<clipPath id="terminal-2065099438-line-48">
     <rect x="0" y="1172.7" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-49">
+<clipPath id="terminal-2065099438-line-49">
     <rect x="0" y="1197.1" width="1464" height="24.65"/>
             </clipPath>
-<clipPath id="terminal-1502996596-line-50">
+<clipPath id="terminal-2065099438-line-50">
     <rect x="0" y="1221.5" width="1464" height="24.65"/>
             </clipPath>
     </defs>
 
-    <rect fill="#292929" stroke="rgba(255,255,255,0.35)" stroke-width="1" x="1" y="1" width="1480" height="1316.8" rx="8"/><text class="terminal-1502996596-title" fill="#c5c8c6" text-anchor="middle" x="740" y="27">Command:&#160;static-checks</text>
+    <rect fill="#292929" stroke="rgba(255,255,255,0.35)" stroke-width="1" x="1" y="1" width="1480" height="1316.8" rx="8"/><text class="terminal-2065099438-title" fill="#c5c8c6" text-anchor="middle" x="740" y="27">Command:&#160;static-checks</text>
             <g transform="translate(26,22)">
             <circle cx="0" cy="0" r="7" fill="#ff5f57"/>
             <circle cx="22" cy="0" r="7" fill="#febc2e"/>
             <circle cx="44" cy="0" r="7" fill="#28c840"/>
             </g>
         
-    <g transform="translate(9, 41)" clip-path="url(#terminal-1502996596-clip-terminal)">
+    <g transform="translate(9, 41)" clip-path="url(#terminal-2065099438-clip-terminal)">
     
-    <g class="terminal-1502996596-matrix">
-    <text class="terminal-1502996596-r2" x="1464" y="20" textLength="12.2" clip-path="url(#terminal-1502996596-line-0)">
-</text><text class="terminal-1502996596-r3" x="12.2" y="44.4" textLength="85.4" clip-path="url(#terminal-1502996596-line-1)">Usage:&#160;</text><text class="terminal-1502996596-r1" x="97.6" y="44.4" textLength="610" clip-path="url(#terminal-1502996596-line-1)">breeze&#160;static-checks&#160;[OPTIONS]&#160;[PRECOMMIT_ARGS]...</text><text class="terminal-1502996596-r2" x="1464" y="44.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-1)">
-</text><text class="terminal-1502996596-r2" x="1464" y="68.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-2)">
-</text><text class="terminal-1502996596-r2" x="12.2" y="93.2" textLength="219.6" clip-path="url(#terminal-1502996596-line-3)">Run&#160;static&#160;checks.</text><text class="terminal-1502996596-r2" x="1464" y="93.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-3)">
-</text><text class="terminal-1502996596-r2" x="1464" y="117.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-4)">
-</text><text class="terminal-1502996596-r4" x="0" y="142" textLength="24.4" clip-path="url(#terminal-1502996596-line-5)">╭─</text><text class="terminal-1502996596-r4" x="24.4" y="142" textLength="1415.2" clip-path="url(#terminal-1502996596-line-5)">&#160;Pre-commit&#160;flags&#160;──────────────────────────────────────────────────────────────────────────────────────────────────</text><text class="terminal-1502996596-r4" x="1439.6" y="142" textLength="24.4" clip-path="url(#terminal-150299 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="166.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-6)">│</text><text class="terminal-1502996596-r5" x="24.4" y="166.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-6)">-</text><text class="terminal-1502996596-r5" x="36.6" y="166.4" textLength="61" clip-path="url(#terminal-1502996596-line-6)">-type</text><text class="terminal-1502996596-r6" x="317.2" y="166.4" textLength="24.4" clip-path="url(#terminal-15029 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="190.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-7)">│</text><text class="terminal-1502996596-r7" x="366" y="190.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-7)">(all&#160;|&#160;black&#160;|&#160;blacken-docs&#160;|&#160;check-airflow-2-2-compatibility&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="215.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-8)">│</text><text class="terminal-1502996596-r7" x="366" y="215.2" textLength="1073.6" clip-path="url(#terminal-1502996596-line-8)">check-airflow-config-yaml-consistent&#160;|&#160;check-apache-license-rat&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="ter [...]
-</text><text class="terminal-1502996596-r4" x="0" y="239.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-9)">│</text><text class="terminal-1502996596-r7" x="366" y="239.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-9)">check-base-operator-partial-arguments&#160;|&#160;check-base-operator-usage&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="264" textLength="12.2" clip-path="url(#terminal-1502996596-line-10)">│</text><text class="terminal-1502996596-r7" x="366" y="264" textLength="1073.6" clip-path="url(#terminal-1502996596-line-10)">check-boring-cyborg-configuration&#160;|&#160;check-breeze-top-dependencies-limited&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="264" textLength=" [...]
-</text><text class="terminal-1502996596-r4" x="0" y="288.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-11)">│</text><text class="terminal-1502996596-r7" x="366" y="288.4" textLength="1073.6" clip-path="url(#terminal-1502996596-line-11)">check-builtin-literals&#160;|&#160;check-changelog-has-no-duplicates&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
-</text><text class="terminal-1502996596-r4" x="0" y="312.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-12)">│</text><text class="terminal-1502996596-r7" x="366" y="312.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-12)">check-core-deprecation-classes&#160;|&#160;check-daysago-import-from-utils&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="termin [...]
-</text><text class="terminal-1502996596-r4" x="0" y="337.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-13)">│</text><text class="terminal-1502996596-r7" x="366" y="337.2" textLength="1073.6" clip-path="url(#terminal-1502996596-line-13)">check-decorated-operator-implements-custom-name&#160;|&#160;check-docstring-param-types&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="337.2" textLength="12.2" clip-pat [...]
-</text><text class="terminal-1502996596-r4" x="0" y="361.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-14)">│</text><text class="terminal-1502996596-r7" x="366" y="361.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-14)">check-example-dags-urls&#160;|&#160;check-executables-have-shebangs&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="386" textLength="12.2" clip-path="url(#terminal-1502996596-line-15)">│</text><text class="terminal-1502996596-r7" x="366" y="386" textLength="1073.6" clip-path="url(#terminal-1502996596-line-15)">check-extra-packages-references&#160;|&#160;check-extras-order&#160;|&#160;check-for-inclusive-language&#160;|&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="386" textLength="12.2" clip-path="url(#terminal-150299659 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="410.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-16)">│</text><text class="terminal-1502996596-r7" x="366" y="410.4" textLength="1073.6" clip-path="url(#terminal-1502996596-line-16)">check-hooks-apply&#160;|&#160;check-incorrect-use-of-LoggingMixin&#160;|&#160;check-init-decorator-arguments</text><text class="terminal-1502996596-r4" x="1451.8" y="410.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-16)">│< [...]
-</text><text class="terminal-1502996596-r4" x="0" y="434.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-17)">│</text><text class="terminal-1502996596-r7" x="366" y="434.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-17)">|&#160;check-lazy-logging&#160;|&#160;check-merge-conflict&#160;|&#160;check-newsfragments-are-valid&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="434.8" t [...]
-</text><text class="terminal-1502996596-r4" x="0" y="459.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-18)">│</text><text class="terminal-1502996596-r7" x="366" y="459.2" textLength="1073.6" clip-path="url(#terminal-1502996596-line-18)">check-no-providers-in-core-examples&#160;|&#160;check-no-relative-imports&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="t [...]
-</text><text class="terminal-1502996596-r4" x="0" y="483.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-19)">│</text><text class="terminal-1502996596-r7" x="366" y="483.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-19)">check-persist-credentials-disabled-in-github-workflows&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="508" textLength="12.2" clip-path="url(#terminal-1502996596-line-20)">│</text><text class="terminal-1502996596-r7" x="366" y="508" textLength="1073.6" clip-path="url(#terminal-1502996596-line-20)">check-pre-commit-information-consistent&#160;|&#160;check-provide-create-sessions-imports&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="508" textLength="12.2" clip-path="url(#terminal [...]
-</text><text class="terminal-1502996596-r4" x="0" y="532.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-21)">│</text><text class="terminal-1502996596-r7" x="366" y="532.4" textLength="1073.6" clip-path="url(#terminal-1502996596-line-21)">check-provider-yaml-valid&#160;|&#160;check-providers-init-file-missing&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><tex [...]
-</text><text class="terminal-1502996596-r4" x="0" y="556.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-22)">│</text><text class="terminal-1502996596-r7" x="366" y="556.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-22)">check-providers-subpackages-init-file-exist&#160;|&#160;check-pydevd-left-in-code&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="556 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="581.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-23)">│</text><text class="terminal-1502996596-r7" x="366" y="581.2" textLength="1073.6" clip-path="url(#terminal-1502996596-line-23)">check-revision-heads-map&#160;|&#160;check-safe-filter-usage-in-html&#160;|&#160;check-setup-order&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="581.2" textLength="12.2" cli [...]
-</text><text class="terminal-1502996596-r4" x="0" y="605.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-24)">│</text><text class="terminal-1502996596-r7" x="366" y="605.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-24)">check-start-date-not-used-in-defaults&#160;|&#160;check-system-tests-present&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="630" textLength="12.2" clip-path="url(#terminal-1502996596-line-25)">│</text><text class="terminal-1502996596-r7" x="366" y="630" textLength="1073.6" clip-path="url(#terminal-1502996596-line-25)">check-system-tests-tocs&#160;|&#160;check-xml&#160;|&#160;codespell&#160;|&#160;compile-www-assets&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="654.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-26)">│</text><text class="terminal-1502996596-r7" x="366" y="654.4" textLength="1073.6" clip-path="url(#terminal-1502996596-line-26)">compile-www-assets-dev&#160;|&#160;create-missing-init-py-files-tests&#160;|&#160;debug-statements&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="654.4" textLength="12.2" cli [...]
-</text><text class="terminal-1502996596-r4" x="0" y="678.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-27)">│</text><text class="terminal-1502996596-r7" x="366" y="678.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-27)">detect-private-key&#160;|&#160;doctoc&#160;|&#160;end-of-file-fixer&#160;|&#160;fix-encoding-pragma&#160;|&#160;flynt&#160;|&#160;identity</text><text class="terminal-1502996596-r4" x="1451.8" y="678.8" textLength="12.2" clip-path="url(#te [...]
-</text><text class="terminal-1502996596-r4" x="0" y="703.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-28)">│</text><text class="terminal-1502996596-r7" x="366" y="703.2" textLength="1073.6" clip-path="url(#terminal-1502996596-line-28)">|&#160;insert-license&#160;|&#160;isort&#160;|&#160;lint-chart-schema&#160;|&#160;lint-css&#160;|&#160;lint-dockerfile&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="727.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-29)">│</text><text class="terminal-1502996596-r7" x="366" y="727.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-29)">lint-helm-chart&#160;|&#160;lint-json-schema&#160;|&#160;lint-markdown&#160;|&#160;lint-openapi&#160;|&#160;mixed-line-ending&#160;|&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="727.6" textLength="12.2" clip-path="url(#te [...]
-</text><text class="terminal-1502996596-r4" x="0" y="752" textLength="12.2" clip-path="url(#terminal-1502996596-line-30)">│</text><text class="terminal-1502996596-r7" x="366" y="752" textLength="1073.6" clip-path="url(#terminal-1502996596-line-30)">pretty-format-json&#160;|&#160;pydocstyle&#160;|&#160;python-no-log-warn&#160;|&#160;pyupgrade&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><te [...]
-</text><text class="terminal-1502996596-r4" x="0" y="776.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-31)">│</text><text class="terminal-1502996596-r7" x="366" y="776.4" textLength="1073.6" clip-path="url(#terminal-1502996596-line-31)">replace-bad-characters&#160;|&#160;rst-backticks&#160;|&#160;run-flake8&#160;|&#160;run-mypy&#160;|&#160;run-shellcheck&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="776.4" textLe [...]
-</text><text class="terminal-1502996596-r4" x="0" y="800.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-32)">│</text><text class="terminal-1502996596-r7" x="366" y="800.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-32)">static-check-autoflake&#160;|&#160;trailing-whitespace&#160;|&#160;ts-compile-and-lint-javascript&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="800.8" textLength="12.2 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="825.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-33)">│</text><text class="terminal-1502996596-r7" x="366" y="825.2" textLength="1073.6" clip-path="url(#terminal-1502996596-line-33)">update-breeze-cmd-output&#160;|&#160;update-breeze-readme-config-hash&#160;|&#160;update-er-diagram&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="825.2" textLength="12.2" clip-pat [...]
-</text><text class="terminal-1502996596-r4" x="0" y="849.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-34)">│</text><text class="terminal-1502996596-r7" x="366" y="849.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-34)">update-extras&#160;|&#160;update-in-the-wild-to-be-sorted&#160;|&#160;update-inlined-dockerfile-scripts&#160;|&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="849.6" textLength="12.2" clip-path="url(#terminal-150 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="874" textLength="12.2" clip-path="url(#terminal-1502996596-line-35)">│</text><text class="terminal-1502996596-r7" x="366" y="874" textLength="1073.6" clip-path="url(#terminal-1502996596-line-35)">update-local-yml-file&#160;|&#160;update-migration-references&#160;|&#160;update-providers-dependencies&#160;|&#160;&#160;&#160;</text><text class="terminal-1502996596-r4" x="1451.8" y="874" textLength="12.2" clip-path="url(#terminal-150299659 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="898.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-36)">│</text><text class="terminal-1502996596-r7" x="366" y="898.4" textLength="1073.6" clip-path="url(#terminal-1502996596-line-36)">update-spelling-wordlist-to-be-sorted&#160;|&#160;update-supported-versions&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-15 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="922.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-37)">│</text><text class="terminal-1502996596-r7" x="366" y="922.8" textLength="1073.6" clip-path="url(#terminal-1502996596-line-37)">update-vendored-in-k8s-json-schema&#160;|&#160;update-version&#160;|&#160;yamllint&#160;|&#160;yesqa)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-1502996 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="947.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-38)">│</text><text class="terminal-1502996596-r5" x="24.4" y="947.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-38)">-</text><text class="terminal-1502996596-r5" x="36.6" y="947.2" textLength="61" clip-path="url(#terminal-1502996596-line-38)">-file</text><text class="terminal-1502996596-r6" x="317.2" y="947.2" textLength="24.4" clip-path="url(#terminal-15 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="971.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-39)">│</text><text class="terminal-1502996596-r5" x="24.4" y="971.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-39)">-</text><text class="terminal-1502996596-r5" x="36.6" y="971.6" textLength="48.8" clip-path="url(#terminal-1502996596-line-39)">-all</text><text class="terminal-1502996596-r5" x="85.4" y="971.6" textLength="73.2" clip-path="url(#terminal-15 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="996" textLength="12.2" clip-path="url(#terminal-1502996596-line-40)">│</text><text class="terminal-1502996596-r5" x="24.4" y="996" textLength="12.2" clip-path="url(#terminal-1502996596-line-40)">-</text><text class="terminal-1502996596-r5" x="36.6" y="996" textLength="61" clip-path="url(#terminal-1502996596-line-40)">-show</text><text class="terminal-1502996596-r5" x="97.6" y="996" textLength="195.2" clip-path="url(#terminal-1502996596 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1020.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-41)">│</text><text class="terminal-1502996596-r5" x="24.4" y="1020.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-41)">-</text><text class="terminal-1502996596-r5" x="36.6" y="1020.4" textLength="61" clip-path="url(#terminal-1502996596-line-41)">-last</text><text class="terminal-1502996596-r5" x="97.6" y="1020.4" textLength="85.4" clip-path="url(#terminal [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1044.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-42)">│</text><text class="terminal-1502996596-r5" x="24.4" y="1044.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-42)">-</text><text class="terminal-1502996596-r5" x="36.6" y="1044.8" textLength="85.4" clip-path="url(#terminal-1502996596-line-42)">-commit</text><text class="terminal-1502996596-r5" x="122" y="1044.8" textLength="48.8" clip-path="url(#termi [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1069.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-43)">│</text><text class="terminal-1502996596-r2" x="366" y="1069.2" textLength="292.8" clip-path="url(#terminal-1502996596-line-43)">Mutually&#160;exclusive&#160;with&#160;</text><text class="terminal-1502996596-r5" x="658.8" y="1069.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-43)">-</text><text class="terminal-1502996596-r5" x="671" y="1069.2" textLe [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1093.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-44)">│</text><text class="terminal-1502996596-r7" x="366" y="1093.6" textLength="1073.6" clip-path="url(#terminal-1502996596-line-44)">(TEXT)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1118" textLength="1464" clip-path="url(#terminal-1502996596-line-45)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="terminal-1502996596-r2" x="1464" y="1118" textLength="12.2" clip-path="url(#terminal-1502996596-line-45)">
-</text><text class="terminal-1502996596-r4" x="0" y="1142.4" textLength="24.4" clip-path="url(#terminal-1502996596-line-46)">╭─</text><text class="terminal-1502996596-r4" x="24.4" y="1142.4" textLength="1415.2" clip-path="url(#terminal-1502996596-line-46)">&#160;Common&#160;options&#160;────────────────────────────────────────────────────────────────────────────────────────────────────</text><text class="terminal-1502996596-r4" x="1439.6" y="1142.4" textLength="24.4" clip-path="url(#term [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1166.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-47)">│</text><text class="terminal-1502996596-r5" x="24.4" y="1166.8" textLength="12.2" clip-path="url(#terminal-1502996596-line-47)">-</text><text class="terminal-1502996596-r5" x="36.6" y="1166.8" textLength="97.6" clip-path="url(#terminal-1502996596-line-47)">-verbose</text><text class="terminal-1502996596-r6" x="280.6" y="1166.8" textLength="24.4" clip-path="url(#te [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1191.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-48)">│</text><text class="terminal-1502996596-r5" x="24.4" y="1191.2" textLength="12.2" clip-path="url(#terminal-1502996596-line-48)">-</text><text class="terminal-1502996596-r5" x="36.6" y="1191.2" textLength="48.8" clip-path="url(#terminal-1502996596-line-48)">-dry</text><text class="terminal-1502996596-r5" x="85.4" y="1191.2" textLength="48.8" clip-path="url(#termina [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1215.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-49)">│</text><text class="terminal-1502996596-r5" x="24.4" y="1215.6" textLength="12.2" clip-path="url(#terminal-1502996596-line-49)">-</text><text class="terminal-1502996596-r5" x="36.6" y="1215.6" textLength="85.4" clip-path="url(#terminal-1502996596-line-49)">-github</text><text class="terminal-1502996596-r5" x="122" y="1215.6" textLength="134.2" clip-path="url(#term [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1240" textLength="12.2" clip-path="url(#terminal-1502996596-line-50)">│</text><text class="terminal-1502996596-r5" x="24.4" y="1240" textLength="12.2" clip-path="url(#terminal-1502996596-line-50)">-</text><text class="terminal-1502996596-r5" x="36.6" y="1240" textLength="61" clip-path="url(#terminal-1502996596-line-50)">-help</text><text class="terminal-1502996596-r6" x="280.6" y="1240" textLength="24.4" clip-path="url(#terminal-150299 [...]
-</text><text class="terminal-1502996596-r4" x="0" y="1264.4" textLength="1464" clip-path="url(#terminal-1502996596-line-51)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="terminal-1502996596-r2" x="1464" y="1264.4" textLength="12.2" clip-path="url(#terminal-1502996596-line-51)">
+    <g class="terminal-2065099438-matrix">
+    <text class="terminal-2065099438-r2" x="1464" y="20" textLength="12.2" clip-path="url(#terminal-2065099438-line-0)">
+</text><text class="terminal-2065099438-r3" x="12.2" y="44.4" textLength="85.4" clip-path="url(#terminal-2065099438-line-1)">Usage:&#160;</text><text class="terminal-2065099438-r1" x="97.6" y="44.4" textLength="610" clip-path="url(#terminal-2065099438-line-1)">breeze&#160;static-checks&#160;[OPTIONS]&#160;[PRECOMMIT_ARGS]...</text><text class="terminal-2065099438-r2" x="1464" y="44.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-1)">
+</text><text class="terminal-2065099438-r2" x="1464" y="68.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-2)">
+</text><text class="terminal-2065099438-r2" x="12.2" y="93.2" textLength="219.6" clip-path="url(#terminal-2065099438-line-3)">Run&#160;static&#160;checks.</text><text class="terminal-2065099438-r2" x="1464" y="93.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-3)">
+</text><text class="terminal-2065099438-r2" x="1464" y="117.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-4)">
+</text><text class="terminal-2065099438-r4" x="0" y="142" textLength="24.4" clip-path="url(#terminal-2065099438-line-5)">╭─</text><text class="terminal-2065099438-r4" x="24.4" y="142" textLength="1415.2" clip-path="url(#terminal-2065099438-line-5)">&#160;Pre-commit&#160;flags&#160;──────────────────────────────────────────────────────────────────────────────────────────────────</text><text class="terminal-2065099438-r4" x="1439.6" y="142" textLength="24.4" clip-path="url(#terminal-206509 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="166.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-6)">│</text><text class="terminal-2065099438-r5" x="24.4" y="166.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-6)">-</text><text class="terminal-2065099438-r5" x="36.6" y="166.4" textLength="61" clip-path="url(#terminal-2065099438-line-6)">-type</text><text class="terminal-2065099438-r6" x="317.2" y="166.4" textLength="24.4" clip-path="url(#terminal-20650 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="190.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-7)">│</text><text class="terminal-2065099438-r7" x="366" y="190.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-7)">(all&#160;|&#160;black&#160;|&#160;blacken-docs&#160;|&#160;check-airflow-config-yaml-consistent&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class [...]
+</text><text class="terminal-2065099438-r4" x="0" y="215.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-8)">│</text><text class="terminal-2065099438-r7" x="366" y="215.2" textLength="1073.6" clip-path="url(#terminal-2065099438-line-8)">check-airflow-provider-compatibility&#160;|&#160;check-apache-license-rat&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="ter [...]
+</text><text class="terminal-2065099438-r4" x="0" y="239.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-9)">│</text><text class="terminal-2065099438-r7" x="366" y="239.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-9)">check-base-operator-partial-arguments&#160;|&#160;check-base-operator-usage&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="264" textLength="12.2" clip-path="url(#terminal-2065099438-line-10)">│</text><text class="terminal-2065099438-r7" x="366" y="264" textLength="1073.6" clip-path="url(#terminal-2065099438-line-10)">check-boring-cyborg-configuration&#160;|&#160;check-breeze-top-dependencies-limited&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="264" textLength=" [...]
+</text><text class="terminal-2065099438-r4" x="0" y="288.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-11)">│</text><text class="terminal-2065099438-r7" x="366" y="288.4" textLength="1073.6" clip-path="url(#terminal-2065099438-line-11)">check-builtin-literals&#160;|&#160;check-changelog-has-no-duplicates&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
+</text><text class="terminal-2065099438-r4" x="0" y="312.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-12)">│</text><text class="terminal-2065099438-r7" x="366" y="312.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-12)">check-core-deprecation-classes&#160;|&#160;check-daysago-import-from-utils&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="termin [...]
+</text><text class="terminal-2065099438-r4" x="0" y="337.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-13)">│</text><text class="terminal-2065099438-r7" x="366" y="337.2" textLength="1073.6" clip-path="url(#terminal-2065099438-line-13)">check-decorated-operator-implements-custom-name&#160;|&#160;check-docstring-param-types&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="337.2" textLength="12.2" clip-pat [...]
+</text><text class="terminal-2065099438-r4" x="0" y="361.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-14)">│</text><text class="terminal-2065099438-r7" x="366" y="361.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-14)">check-example-dags-urls&#160;|&#160;check-executables-have-shebangs&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="386" textLength="12.2" clip-path="url(#terminal-2065099438-line-15)">│</text><text class="terminal-2065099438-r7" x="366" y="386" textLength="1073.6" clip-path="url(#terminal-2065099438-line-15)">check-extra-packages-references&#160;|&#160;check-extras-order&#160;|&#160;check-for-inclusive-language&#160;|&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="386" textLength="12.2" clip-path="url(#terminal-206509943 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="410.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-16)">│</text><text class="terminal-2065099438-r7" x="366" y="410.4" textLength="1073.6" clip-path="url(#terminal-2065099438-line-16)">check-hooks-apply&#160;|&#160;check-incorrect-use-of-LoggingMixin&#160;|&#160;check-init-decorator-arguments</text><text class="terminal-2065099438-r4" x="1451.8" y="410.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-16)">│< [...]
+</text><text class="terminal-2065099438-r4" x="0" y="434.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-17)">│</text><text class="terminal-2065099438-r7" x="366" y="434.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-17)">|&#160;check-lazy-logging&#160;|&#160;check-merge-conflict&#160;|&#160;check-newsfragments-are-valid&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="434.8" t [...]
+</text><text class="terminal-2065099438-r4" x="0" y="459.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-18)">│</text><text class="terminal-2065099438-r7" x="366" y="459.2" textLength="1073.6" clip-path="url(#terminal-2065099438-line-18)">check-no-providers-in-core-examples&#160;|&#160;check-no-relative-imports&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="t [...]
+</text><text class="terminal-2065099438-r4" x="0" y="483.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-19)">│</text><text class="terminal-2065099438-r7" x="366" y="483.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-19)">check-persist-credentials-disabled-in-github-workflows&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="508" textLength="12.2" clip-path="url(#terminal-2065099438-line-20)">│</text><text class="terminal-2065099438-r7" x="366" y="508" textLength="1073.6" clip-path="url(#terminal-2065099438-line-20)">check-pre-commit-information-consistent&#160;|&#160;check-provide-create-sessions-imports&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="508" textLength="12.2" clip-path="url(#terminal [...]
+</text><text class="terminal-2065099438-r4" x="0" y="532.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-21)">│</text><text class="terminal-2065099438-r7" x="366" y="532.4" textLength="1073.6" clip-path="url(#terminal-2065099438-line-21)">check-provider-yaml-valid&#160;|&#160;check-providers-init-file-missing&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><tex [...]
+</text><text class="terminal-2065099438-r4" x="0" y="556.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-22)">│</text><text class="terminal-2065099438-r7" x="366" y="556.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-22)">check-providers-subpackages-init-file-exist&#160;|&#160;check-pydevd-left-in-code&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="556 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="581.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-23)">│</text><text class="terminal-2065099438-r7" x="366" y="581.2" textLength="1073.6" clip-path="url(#terminal-2065099438-line-23)">check-revision-heads-map&#160;|&#160;check-safe-filter-usage-in-html&#160;|&#160;check-setup-order&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="581.2" textLength="12.2" cli [...]
+</text><text class="terminal-2065099438-r4" x="0" y="605.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-24)">│</text><text class="terminal-2065099438-r7" x="366" y="605.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-24)">check-start-date-not-used-in-defaults&#160;|&#160;check-system-tests-present&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="630" textLength="12.2" clip-path="url(#terminal-2065099438-line-25)">│</text><text class="terminal-2065099438-r7" x="366" y="630" textLength="1073.6" clip-path="url(#terminal-2065099438-line-25)">check-system-tests-tocs&#160;|&#160;check-xml&#160;|&#160;codespell&#160;|&#160;compile-www-assets&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="654.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-26)">│</text><text class="terminal-2065099438-r7" x="366" y="654.4" textLength="1073.6" clip-path="url(#terminal-2065099438-line-26)">compile-www-assets-dev&#160;|&#160;create-missing-init-py-files-tests&#160;|&#160;debug-statements&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="654.4" textLength="12.2" cli [...]
+</text><text class="terminal-2065099438-r4" x="0" y="678.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-27)">│</text><text class="terminal-2065099438-r7" x="366" y="678.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-27)">detect-private-key&#160;|&#160;doctoc&#160;|&#160;end-of-file-fixer&#160;|&#160;fix-encoding-pragma&#160;|&#160;flynt&#160;|&#160;identity</text><text class="terminal-2065099438-r4" x="1451.8" y="678.8" textLength="12.2" clip-path="url(#te [...]
+</text><text class="terminal-2065099438-r4" x="0" y="703.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-28)">│</text><text class="terminal-2065099438-r7" x="366" y="703.2" textLength="1073.6" clip-path="url(#terminal-2065099438-line-28)">|&#160;insert-license&#160;|&#160;isort&#160;|&#160;lint-chart-schema&#160;|&#160;lint-css&#160;|&#160;lint-dockerfile&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="727.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-29)">│</text><text class="terminal-2065099438-r7" x="366" y="727.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-29)">lint-helm-chart&#160;|&#160;lint-json-schema&#160;|&#160;lint-markdown&#160;|&#160;lint-openapi&#160;|&#160;mixed-line-ending&#160;|&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="727.6" textLength="12.2" clip-path="url(#te [...]
+</text><text class="terminal-2065099438-r4" x="0" y="752" textLength="12.2" clip-path="url(#terminal-2065099438-line-30)">│</text><text class="terminal-2065099438-r7" x="366" y="752" textLength="1073.6" clip-path="url(#terminal-2065099438-line-30)">pretty-format-json&#160;|&#160;pydocstyle&#160;|&#160;python-no-log-warn&#160;|&#160;pyupgrade&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><te [...]
+</text><text class="terminal-2065099438-r4" x="0" y="776.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-31)">│</text><text class="terminal-2065099438-r7" x="366" y="776.4" textLength="1073.6" clip-path="url(#terminal-2065099438-line-31)">replace-bad-characters&#160;|&#160;rst-backticks&#160;|&#160;run-flake8&#160;|&#160;run-mypy&#160;|&#160;run-shellcheck&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="776.4" textLe [...]
+</text><text class="terminal-2065099438-r4" x="0" y="800.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-32)">│</text><text class="terminal-2065099438-r7" x="366" y="800.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-32)">static-check-autoflake&#160;|&#160;trailing-whitespace&#160;|&#160;ts-compile-and-lint-javascript&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="800.8" textLength="12.2 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="825.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-33)">│</text><text class="terminal-2065099438-r7" x="366" y="825.2" textLength="1073.6" clip-path="url(#terminal-2065099438-line-33)">update-breeze-cmd-output&#160;|&#160;update-breeze-readme-config-hash&#160;|&#160;update-er-diagram&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="825.2" textLength="12.2" clip-pat [...]
+</text><text class="terminal-2065099438-r4" x="0" y="849.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-34)">│</text><text class="terminal-2065099438-r7" x="366" y="849.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-34)">update-extras&#160;|&#160;update-in-the-wild-to-be-sorted&#160;|&#160;update-inlined-dockerfile-scripts&#160;|&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="849.6" textLength="12.2" clip-path="url(#terminal-206 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="874" textLength="12.2" clip-path="url(#terminal-2065099438-line-35)">│</text><text class="terminal-2065099438-r7" x="366" y="874" textLength="1073.6" clip-path="url(#terminal-2065099438-line-35)">update-local-yml-file&#160;|&#160;update-migration-references&#160;|&#160;update-providers-dependencies&#160;|&#160;&#160;&#160;</text><text class="terminal-2065099438-r4" x="1451.8" y="874" textLength="12.2" clip-path="url(#terminal-206509943 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="898.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-36)">│</text><text class="terminal-2065099438-r7" x="366" y="898.4" textLength="1073.6" clip-path="url(#terminal-2065099438-line-36)">update-spelling-wordlist-to-be-sorted&#160;|&#160;update-supported-versions&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-20 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="922.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-37)">│</text><text class="terminal-2065099438-r7" x="366" y="922.8" textLength="1073.6" clip-path="url(#terminal-2065099438-line-37)">update-vendored-in-k8s-json-schema&#160;|&#160;update-version&#160;|&#160;yamllint&#160;|&#160;yesqa)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="terminal-2065099 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="947.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-38)">│</text><text class="terminal-2065099438-r5" x="24.4" y="947.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-38)">-</text><text class="terminal-2065099438-r5" x="36.6" y="947.2" textLength="61" clip-path="url(#terminal-2065099438-line-38)">-file</text><text class="terminal-2065099438-r6" x="317.2" y="947.2" textLength="24.4" clip-path="url(#terminal-20 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="971.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-39)">│</text><text class="terminal-2065099438-r5" x="24.4" y="971.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-39)">-</text><text class="terminal-2065099438-r5" x="36.6" y="971.6" textLength="48.8" clip-path="url(#terminal-2065099438-line-39)">-all</text><text class="terminal-2065099438-r5" x="85.4" y="971.6" textLength="73.2" clip-path="url(#terminal-20 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="996" textLength="12.2" clip-path="url(#terminal-2065099438-line-40)">│</text><text class="terminal-2065099438-r5" x="24.4" y="996" textLength="12.2" clip-path="url(#terminal-2065099438-line-40)">-</text><text class="terminal-2065099438-r5" x="36.6" y="996" textLength="61" clip-path="url(#terminal-2065099438-line-40)">-show</text><text class="terminal-2065099438-r5" x="97.6" y="996" textLength="195.2" clip-path="url(#terminal-2065099438 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1020.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-41)">│</text><text class="terminal-2065099438-r5" x="24.4" y="1020.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-41)">-</text><text class="terminal-2065099438-r5" x="36.6" y="1020.4" textLength="61" clip-path="url(#terminal-2065099438-line-41)">-last</text><text class="terminal-2065099438-r5" x="97.6" y="1020.4" textLength="85.4" clip-path="url(#terminal [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1044.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-42)">│</text><text class="terminal-2065099438-r5" x="24.4" y="1044.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-42)">-</text><text class="terminal-2065099438-r5" x="36.6" y="1044.8" textLength="85.4" clip-path="url(#terminal-2065099438-line-42)">-commit</text><text class="terminal-2065099438-r5" x="122" y="1044.8" textLength="48.8" clip-path="url(#termi [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1069.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-43)">│</text><text class="terminal-2065099438-r2" x="366" y="1069.2" textLength="292.8" clip-path="url(#terminal-2065099438-line-43)">Mutually&#160;exclusive&#160;with&#160;</text><text class="terminal-2065099438-r5" x="658.8" y="1069.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-43)">-</text><text class="terminal-2065099438-r5" x="671" y="1069.2" textLe [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1093.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-44)">│</text><text class="terminal-2065099438-r7" x="366" y="1093.6" textLength="1073.6" clip-path="url(#terminal-2065099438-line-44)">(TEXT)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1118" textLength="1464" clip-path="url(#terminal-2065099438-line-45)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="terminal-2065099438-r2" x="1464" y="1118" textLength="12.2" clip-path="url(#terminal-2065099438-line-45)">
+</text><text class="terminal-2065099438-r4" x="0" y="1142.4" textLength="24.4" clip-path="url(#terminal-2065099438-line-46)">╭─</text><text class="terminal-2065099438-r4" x="24.4" y="1142.4" textLength="1415.2" clip-path="url(#terminal-2065099438-line-46)">&#160;Common&#160;options&#160;────────────────────────────────────────────────────────────────────────────────────────────────────</text><text class="terminal-2065099438-r4" x="1439.6" y="1142.4" textLength="24.4" clip-path="url(#term [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1166.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-47)">│</text><text class="terminal-2065099438-r5" x="24.4" y="1166.8" textLength="12.2" clip-path="url(#terminal-2065099438-line-47)">-</text><text class="terminal-2065099438-r5" x="36.6" y="1166.8" textLength="97.6" clip-path="url(#terminal-2065099438-line-47)">-verbose</text><text class="terminal-2065099438-r6" x="280.6" y="1166.8" textLength="24.4" clip-path="url(#te [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1191.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-48)">│</text><text class="terminal-2065099438-r5" x="24.4" y="1191.2" textLength="12.2" clip-path="url(#terminal-2065099438-line-48)">-</text><text class="terminal-2065099438-r5" x="36.6" y="1191.2" textLength="48.8" clip-path="url(#terminal-2065099438-line-48)">-dry</text><text class="terminal-2065099438-r5" x="85.4" y="1191.2" textLength="48.8" clip-path="url(#termina [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1215.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-49)">│</text><text class="terminal-2065099438-r5" x="24.4" y="1215.6" textLength="12.2" clip-path="url(#terminal-2065099438-line-49)">-</text><text class="terminal-2065099438-r5" x="36.6" y="1215.6" textLength="85.4" clip-path="url(#terminal-2065099438-line-49)">-github</text><text class="terminal-2065099438-r5" x="122" y="1215.6" textLength="134.2" clip-path="url(#term [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1240" textLength="12.2" clip-path="url(#terminal-2065099438-line-50)">│</text><text class="terminal-2065099438-r5" x="24.4" y="1240" textLength="12.2" clip-path="url(#terminal-2065099438-line-50)">-</text><text class="terminal-2065099438-r5" x="36.6" y="1240" textLength="61" clip-path="url(#terminal-2065099438-line-50)">-help</text><text class="terminal-2065099438-r6" x="280.6" y="1240" textLength="24.4" clip-path="url(#terminal-206509 [...]
+</text><text class="terminal-2065099438-r4" x="0" y="1264.4" textLength="1464" clip-path="url(#terminal-2065099438-line-51)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="terminal-2065099438-r2" x="1464" y="1264.4" textLength="12.2" clip-path="url(#terminal-2065099438-line-51)">
 </text>
     </g>
     </g>
diff --git a/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py b/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py
deleted file mode 100755
index 86e746ff8c..0000000000
--- a/scripts/ci/pre_commit/pre_commit_check_2_2_compatibility.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-import re
-import sys
-from pathlib import Path
-
-from rich.console import Console
-
-if __name__ not in ("__main__", "__mp_main__"):
-    raise SystemExit(
-        "This file is intended to be executed as an executable program. You cannot use it as a module."
-        f"To run this script, run the ./{__file__} command [FILE] ..."
-    )
-
-console = Console(color_system="standard", width=200)
-
-errors: list[str] = []
-
-SKIP_COMP_CHECK = "# ignore airflow compat check"
-TRY_NUM_MATCHER = re.compile(r".*context.*\[[\"']try_number[\"']].*")
-GET_MANDATORY_MATCHER = re.compile(r".*conf\.get_mandatory_value")
-GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)")
-HOOK_PARAMS_MATCHER = re.compile(r".*get_hook\(hook_params")
-
-
-def _check_file(_file: Path):
-    lines = _file.read_text().splitlines()
-
-    for index, line in enumerate(lines):
-        if SKIP_COMP_CHECK in line:
-            continue
-
-        if "XCom.get_value(" in line:
-            if "if ti_key is not None:" not in lines[index - 1]:
-                errors.append(
-                    f"[red]In {_file}:{index} there is a forbidden construct "
-                    "(Airflow 2.3.0 only):[/]\n\n"
-                    f"{lines[index-1]}\n{lines[index]}\n\n"
-                    "[yellow]When you use XCom.get_value( in providers, it should be in the form:[/]\n\n"
-                    "if ti_key is not None:\n"
-                    "    value = XCom.get_value(...., ti_key=ti_key)\n\n"
-                    "See: https://airflow.apache.org/docs/apache-airflow-providers/"
-                    "howto/create-update-providers.html#using-providers-with-dynamic-task-mapping\n"
-                )
-        if "ti.map_index" in line:
-            errors.append(
-                f"[red]In {_file}:{index} there is a forbidden construct "
-                "(Airflow 2.3+ only):[/]\n\n"
-                f"{lines[index]}\n\n"
-                "[yellow]You should not use map_index field in providers "
-                "as it is only available in Airflow 2.3+[/]"
-            )
-
-        if TRY_NUM_MATCHER.match(line):
-            errors.append(
-                f"[red]In {_file}:{index} there is a forbidden construct "
-                "(Airflow 2.3+ only):[/]\n\n"
-                f"{lines[index]}\n\n"
-                "[yellow]You should not expect try_number field for context in providers "
-                "as it is only available in Airflow 2.3+[/]"
-            )
-
-        if GET_MANDATORY_MATCHER.match(line):
-            errors.append(
-                f"[red]In {_file}:{index} there is a forbidden construct "
-                "(Airflow 2.3+ only):[/]\n\n"
-                f"{lines[index]}\n\n"
-                "[yellow]You should not use conf.get_mandatory_value in providers "
-                "as it is only available in Airflow 2.3+[/]"
-            )
-
-        if HOOK_PARAMS_MATCHER.match(line):
-            errors.append(
-                f"[red]In {_file}:{index} there is a forbidden construct "
-                "(Airflow 2.3+ only):[/]\n\n"
-                f"{lines[index]}\n\n"
-                "[yellow]You should not use 'hook_params' in get_hook as it has been added in providers "
-                "as it is not available in Airflow 2.3+. Use get_hook() instead.[/]"
-            )
-
-        if GET_AIRFLOW_APP_MATCHER.match(line):
-            errors.append(
-                f"[red]In {_file}:{index} there is a forbidden construct "
-                "(Airflow 2.4+ only):[/]\n\n"
-                f"{lines[index]}\n\n"
-                "[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers "
-                "as it is not available in Airflow 2.4+. Use current_app instead.[/]"
-            )
-
-
-if __name__ == "__main__":
-    for file in sys.argv[1:]:
-        _check_file(Path(file))
-    if errors:
-        console.print("[red]Found Airflow 2.2 compatibility problems in providers:[/]\n")
-        for error in errors:
-            console.print(f"{error}")
-        sys.exit(1)
diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py b/scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py
new file mode 100755
index 0000000000..57aa9154f7
--- /dev/null
+++ b/scripts/ci/pre_commit/pre_commit_check_provider_airflow_compatibility.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import re
+import sys
+from pathlib import Path
+
+from rich.console import Console
+
+if __name__ not in ("__main__", "__mp_main__"):
+    raise SystemExit(
+        "This file is intended to be executed as an executable program. You cannot use it as a module."
+        f"To run this script, run the ./{__file__} command [FILE] ..."
+    )
+
+console = Console(color_system="standard", width=200)
+
+errors: list[str] = []
+
+SKIP_COMP_CHECK = "# ignore airflow compat check"
+GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)")
+
+
+def _check_file(_file: Path):
+    lines = _file.read_text().splitlines()
+
+    for index, line in enumerate(lines):
+        if SKIP_COMP_CHECK in line:
+            continue
+
+        if GET_AIRFLOW_APP_MATCHER.match(line):
+            errors.append(
+                f"[red]In {_file}:{index} there is a forbidden construct "
+                "(Airflow 2.4+ only):[/]\n\n"
+                f"{lines[index]}\n\n"
+                "[yellow]You should not use airflow.utils.airflow_flask_app.get_airflow_app() in providers "
+                "as it is not available in Airflow 2.4+. Use current_app instead.[/]"
+            )
+
+
+if __name__ == "__main__":
+    for file in sys.argv[1:]:
+        _check_file(Path(file))
+    if errors:
+        console.print("[red]Found Airflow 2.3+ compatibility problems in providers:[/]\n")
+        for error in errors:
+            console.print(f"{error}")
+        sys.exit(1)
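
For reference, a minimal sketch of how the matcher in the new check behaves; the sample lines below are illustrative and not taken from any provider:

    import re

    GET_AIRFLOW_APP_MATCHER = re.compile(r".*get_airflow_app\(\)")
    SKIP_COMP_CHECK = "# ignore airflow compat check"

    # An Airflow 2.4+ only call in provider code is flagged by the matcher.
    flagged = "app = get_airflow_app()"
    assert GET_AIRFLOW_APP_MATCHER.match(flagged) is not None

    # Lines carrying the opt-out marker are skipped before the matcher runs.
    skipped = "app = get_airflow_app()  # ignore airflow compat check"
    assert SKIP_COMP_CHECK in skipped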
diff --git a/scripts/in_container/verify_providers.py b/scripts/in_container/verify_providers.py
index 4415b41a52..ad5a6b0f73 100755
--- a/scripts/in_container/verify_providers.py
+++ b/scripts/in_container/verify_providers.py
@@ -161,6 +161,11 @@ KNOWN_DEPRECATED_MESSAGES: set[tuple[str, str]] = {
         " adheres to: 'pyarrow<3.1.0,>=3.0.0; extra == \"pandas\"'",
         "snowflake",
     ),
+    (
+        "You have an incompatible version of 'pyarrow' installed (5.0.0), please install a version that"
+        " adheres to: 'pyarrow<6.1.0,>=6.0.0; extra == \"pandas\"'",
+        "snowflake",
+    ),
     (
         "You have an incompatible version of 'pyarrow' installed (6.0.1), please install a version that"
         " adheres to: 'pyarrow<5.1.0,>=5.0.0; extra == \"pandas\"'",
diff --git a/tests/providers/snowflake/hooks/test_snowflake.py b/tests/providers/snowflake/hooks/test_snowflake.py
index 7abc50d121..87c661043a 100644
--- a/tests/providers/snowflake/hooks/test_snowflake.py
+++ b/tests/providers/snowflake/hooks/test_snowflake.py
@@ -553,23 +553,23 @@ class TestPytestSnowflakeHook:
         assert hook.query_ids == expected_query_ids
         cur.close.assert_called()
 
-    @mock.patch("airflow.providers.snowflake.hooks.snowflake.SnowflakeHook.run")
-    def test_connection_success(self, mock_run):
+    @mock.patch("airflow.providers.common.sql.hooks.sql.DbApiHook.get_first")
+    def test_connection_success(self, mock_get_first):
         with unittest.mock.patch.dict(
             "os.environ", AIRFLOW_CONN_SNOWFLAKE_DEFAULT=Connection(**BASE_CONNECTION_KWARGS).get_uri()
         ):
             hook = SnowflakeHook()
-            mock_run.return_value = [{"1": 1}]
+            mock_get_first.return_value = [{"1": 1}]
             status, msg = hook.test_connection()
             assert status is True
             assert msg == "Connection successfully tested"
-            mock_run.assert_called_once_with(sql="select 1")
+            mock_get_first.assert_called_once_with("select 1")
 
     @mock.patch(
-        "airflow.providers.snowflake.hooks.snowflake.SnowflakeHook.run",
+        "airflow.providers.common.sql.hooks.sql.DbApiHook.get_first",
         side_effect=Exception("Connection Errors"),
     )
-    def test_connection_failure(self, mock_run):
+    def test_connection_failure(self, mock_get_first):
         with unittest.mock.patch.dict(
             "os.environ", AIRFLOW_CONN_SNOWFLAKE_DEFAULT=Connection(**BASE_CONNECTION_KWARGS).get_uri()
         ):
@@ -577,7 +577,7 @@ class TestPytestSnowflakeHook:
             status, msg = hook.test_connection()
             assert status is False
             assert msg == "Connection Errors"
-            mock_run.assert_called_once_with(sql="select 1")
+            mock_get_first.assert_called_once_with("select 1")
 
     def test_empty_sql_parameter(self):
         hook = SnowflakeHook()
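
The mock changes above reflect that SnowflakeHook.test_connection now goes through the common-sql DbApiHook. A minimal sketch of the behaviour these assertions expect (not the actual DbApiHook implementation) is:

    from __future__ import annotations

    def test_connection_sketch(hook) -> tuple[bool, str]:
        # Sketch only: mirrors the assertions above, assuming the common-sql
        # implementation issues "select 1" through get_first().
        try:
            hook.get_first("select 1")
        except Exception as e:
            return False, str(e)
        return True, "Connection successfully tested"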