Posted to commits@airflow.apache.org by po...@apache.org on 2023/07/26 06:25:12 UTC

[airflow] branch main updated: Move all k8S classes to cncf.kubernetes provider (#32767)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new e93460383f Move all k8S classes to cncf.kubernetes provider (#32767)
e93460383f is described below

commit e93460383f287f9b2af4b6bda3ea6ba17ba3c08b
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Wed Jul 26 08:25:02 2023 +0200

    Move all k8S classes to cncf.kubernetes provider (#32767)
    
    * Move all k8S classes to cncf.kubernetes provider
    
    This is the big move of all Kubernetes classes to the provider.
    
    The changes that are implemented in this move:
    
    * replaced all imports from airflow.kubernetes with cncf.kubernetes,
      using PEP-563 dynamic import redirection and deprecation messages;
      those messages now support overriding the "replacement" hints
      to make K8s deprecations more accurate
    * the pre_7_4_0_compatibility package with classes used by past
      providers has been "frozen" and stored in the package, with
      import redirections from airflow.kubernetes (with deprecation warnings)
    * the kubernetes configuration is moved to the kubernetes provider
    * mypy started complaining about conf and set used in configuration,
      so a better solution to handle deprecations and hint conf as
      returning AirflowConfigParser was added.
    * example_kubernetes_executor reads configuration not at the
      top level but in the execute method
    * PodMutationHookException and PodReconciliationError have
      been moved to the cncf.kubernetes provider and are imported
      from there, with a fallback to the airflow.exceptions ones in case
      an old provider is used in Airflow 2.7.0
    * k8s methods in task_instance have been deprecated and replaced
      with functions in the `cncf.kubernetes` template_rendering module;
      the old way still works but raises deprecation warnings.
    * added extras with versions for celery and k8s
    * raise AirflowOptionalProviderFeatureException in case there is an
      attempt to use CeleryK8sExecutor and cncf.k8s is not installed.
    * added a few "new" core utils to k8s (hashlib_wrapper etc.)
    * both warnings and errors indicate minimum versions for both cncf.k8s
      and Celery providers.
    
    * Update newsfragments/32767.significant.rst
    
    Co-authored-by: Jed Cunningham <66...@users.noreply.github.com>
    
    ---------
    
    Co-authored-by: Jed Cunningham <66...@users.noreply.github.com>
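
In practice, DAG code that imports from airflow.kubernetes keeps working through the
redirection shims added below, but now emits a DeprecationWarning pointing at the
provider package. A minimal sketch of what that looks like, assuming
apache-airflow-providers-cncf-kubernetes 7.4.0+ is installed (both paths are taken
from the redirection tables in the diff):

    # Old import path: still resolves via the shim in airflow/kubernetes/__init__.py,
    # but emits a DeprecationWarning hinting at the provider class.
    from airflow.kubernetes.secret import Secret as OldPathSecret

    # New import path suggested by the warning (cncf.kubernetes provider 7.4.0+).
    from airflow.providers.cncf.kubernetes.secret import Secret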
---
 .github/CODEOWNERS                                 |   1 -
 .github/boring-cyborg.yml                          |   6 -
 .pre-commit-config.yaml                            |  16 ++
 STATIC_CODE_CHECKS.rst                             |   4 +
 airflow/cli/commands/kubernetes_command.py         |  10 +-
 airflow/config_templates/__init__.py               |   2 +-
 airflow/config_templates/config.yml                | 210 ---------------------
 airflow/configuration.py                           |   9 +-
 airflow/decorators/__init__.pyi                    |   2 +-
 .../example_dags/example_kubernetes_executor.py    |   5 +-
 airflow/exceptions.py                              |  28 ++-
 airflow/executors/__init__.py                      |  30 ++-
 airflow/executors/executor_loader.py               |   9 +-
 airflow/kubernetes/__init__.py                     | 125 ++++++++++++
 .../__init__.py}                                   |  18 +-
 .../{ => pre_7_4_0_compatibility}/k8s_model.py     |   0
 .../{ => pre_7_4_0_compatibility}/kube_client.py   |   0
 .../{ => pre_7_4_0_compatibility}/pod_generator.py |  33 +++-
 .../pod_generator_deprecated.py                    |   0
 .../{ => pre_7_4_0_compatibility}/secret.py        |   2 +-
 airflow/kubernetes/volume.py                       |  33 ----
 airflow/kubernetes/volume_mount.py                 |  33 ----
 airflow/models/base.py                             |   2 +-
 airflow/models/renderedtifields.py                 |   6 +-
 airflow/models/taskinstance.py                     |  80 ++++----
 .../providers/amazon/aws/notifications/chime.py    |   2 +-
 airflow/providers/apache/spark/CHANGELOG.rst       |  19 ++
 airflow/providers/apache/spark/__init__.py         |   2 +-
 .../providers/apache/spark/hooks/spark_submit.py   |   2 +-
 airflow/providers/apache/spark/provider.yaml       |   6 +
 airflow/providers/celery/CHANGELOG.rst             |  17 +-
 airflow/providers/celery/__init__.py               |   2 +-
 .../celery/executors/celery_kubernetes_executor.py |   9 +-
 airflow/providers/celery/provider.yaml             |   7 +-
 airflow/providers/cncf/kubernetes/CHANGELOG.rst    |  17 +-
 .../cncf/kubernetes/executors}/__init__.py         |   0
 .../kubernetes}/executors/kubernetes_executor.py   |  48 +++--
 .../executors/kubernetes_executor_types.py         |   0
 .../executors/kubernetes_executor_utils.py         |  38 +++-
 .../executors/local_kubernetes_executor.py         |   2 +-
 .../providers/cncf/kubernetes/hooks/kubernetes.py  |   2 +-
 .../{ => providers/cncf}/kubernetes/k8s_model.py   |   0
 .../{ => providers/cncf}/kubernetes/kube_client.py |   8 +-
 .../{ => providers/cncf}/kubernetes/kube_config.py |   1 -
 .../kubernetes_executor_templates}/__init__.py     |   0
 .../basic_template.yaml                            |   0
 .../kubernetes/kubernetes_helper_functions.py      |  10 +-
 airflow/providers/cncf/kubernetes/operators/pod.py |   6 +-
 .../cncf}/kubernetes/pod_generator.py              |  20 +-
 .../cncf}/kubernetes/pod_generator_deprecated.py   |   5 +-
 .../cncf}/kubernetes/pod_launcher_deprecated.py    |   4 +-
 .../pod_template_file_examples}/__init__.py        |   0
 .../dags_in_image_template.yaml                    |   0
 .../dags_in_volume_template.yaml                   |   0
 .../git_sync_template.yaml                         |   0
 airflow/providers/cncf/kubernetes/provider.yaml    | 210 +++++++++++++++++++++
 airflow/{ => providers/cncf}/kubernetes/secret.py  |   2 +-
 .../cncf/kubernetes/template_rendering.py          |  67 +++++++
 .../cncf/kubernetes/utils/k8s_hashlib_wrapper.py}  |  36 ++--
 .../providers/cncf/kubernetes/utils/pod_manager.py |   4 +-
 airflow/serialization/serialized_objects.py        |   9 +-
 airflow/serialization/serializers/kubernetes.py    |   7 +-
 airflow/timetables/_cron.py                        |   4 +-
 airflow/utils/deprecation_tools.py                 |  68 ++++++-
 .../__init__.py => utils/empty_set.py}             |  19 +-
 airflow/utils/hashlib_wrapper.py                   |   5 +-
 airflow/utils/sqlalchemy.py                        |   8 +-
 airflow/www/views.py                               |   6 +-
 .../airflow_breeze/commands/kubernetes_commands.py |   2 +-
 dev/breeze/src/airflow_breeze/pre_commit_ids.py    |   2 +
 dev/breeze/tests/test_selective_checks.py          |   3 +-
 .../changelog.rst                                  |   2 -
 .../commits.rst                                    |  16 ++
 .../index.rst                                      |  30 ++-
 docs/apache-airflow-providers-celery/changelog.rst |   2 -
 docs/apache-airflow-providers-celery/commits.rst   |  21 +++
 docs/apache-airflow-providers-celery/index.rst     |  30 ++-
 .../configurations-ref.rst}                        |   5 +-
 .../index.rst                                      |   1 +
 .../operators.rst                                  |   2 +-
 docs/apache-airflow/configurations-ref.rst         |   1 +
 .../core-concepts/executor/celery.rst              |   7 +
 .../core-concepts/executor/celery_kubernetes.rst   |   8 +
 .../core-concepts/executor/kubernetes.rst          |  14 +-
 .../core-concepts/executor/local_kubernetes.rst    |   9 +-
 docs/apache-airflow/extra-packages-ref.rst         |  90 ++++-----
 .../howto/upgrading-from-1-10/index.rst            |   4 +-
 docs/spelling_wordlist.txt                         |   5 +
 generated/provider_dependencies.json               |   8 +-
 images/breeze/output-commands-hash.txt             |   2 +-
 images/breeze/output_static-checks.svg             | 142 +++++++-------
 .../conftest.py                                    |  15 +-
 kubernetes_tests/test_kubernetes_pod_operator.py   |   8 +-
 newsfragments/32767.significant.rst                |   7 +
 .../pre_commit_check_airflow_k8s_not_used.py       |  81 ++++++++
 ...it_check_cncf_k8s_used_for_k8s_executor_only.py |  84 +++++++++
 .../in_container/run_provider_yaml_files_check.py  |   2 +
 setup.py                                           |   3 -
 tests/cli/commands/test_kubernetes_command.py      |   6 +-
 tests/cli/commands/test_task_command.py            |   8 +-
 tests/models/test_renderedtifields.py              |  48 -----
 tests/models/test_taskinstance.py                  |  81 --------
 .../apache/spark/hooks/test_spark_submit.py        |   2 +-
 .../executors/test_celery_kubernetes_executor.py   |   2 +-
 tests/providers/cncf/kubernetes/__init__.py        |   1 +
 .../{ => providers/cncf}/kubernetes/basic_pod.yaml |   0
 .../cncf/kubernetes/executors}/__init__.py         |   0
 .../__init__.py                                    |   0
 .../basic_template.yaml                            |   0
 .../executors/test_kubernetes_executor.py          | 158 +++++++++-------
 .../executors/test_local_kubernetes_executor.py    |   4 +-
 tests/{ => providers/cncf}/kubernetes/kube_config  |   0
 .../cncf}/kubernetes/models/__init__.py            |   0
 .../cncf}/kubernetes/models/test_secret.py         |  10 +-
 .../cncf/kubernetes/operators/test_pod.py          |   2 +-
 tests/{ => providers/cncf}/kubernetes/pod.yaml     |   0
 .../cncf}/kubernetes/pod_generator_base.yaml       |   0
 .../pod_generator_base_with_secrets.yaml           |   0
 .../{ => providers/cncf}/kubernetes/test_client.py |  18 +-
 .../kubernetes/test_kubernetes_helper_functions.py |   2 +-
 .../cncf}/kubernetes/test_pod_generator.py         |  33 ++--
 .../cncf/kubernetes/test_template_rendering.py     | 156 +++++++++++++++
 tests/sensors/test_base.py                         |   6 +-
 tests/serialization/test_dag_serialization.py      |   4 +-
 .../cncf/kubernetes/example_kubernetes.py          |   2 +-
 .../cncf/kubernetes/example_kubernetes_async.py    |   2 +-
 tests/utils/test_log_handlers.py                   |   6 +-
 127 files changed, 1641 insertions(+), 852 deletions(-)

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index dfc0e13a3a..aa036aeac5 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -11,7 +11,6 @@
 
 # Kubernetes
 /airflow/kubernetes/ @dstandish @jedcunningham
-/airflow/kubernetes_executor_templates/ @dstandish @jedcunningham
 /airflow/executors/celery_kubernetes_executor.py @dstandish @jedcunningham
 /airflow/executors/kubernetes_executor.py @dstandish @jedcunningham
 
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index 989bb055df..e2f7c037ec 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -164,9 +164,6 @@ labelPRBasedOnFilePath:
     - airflow/**/kubernetes_*.py
     - airflow/example_dags/example_kubernetes_executor.py
     - airflow/providers/cncf/kubernetes/**/*
-    - airflow/kubernetes/**/*
-    - airflow/kubernetes_executor_templates/**/*
-    - airflow/executors/kubernetes_executor.py
     - airflow/providers/celery/executors/celery_kubernetes_executor.py
     - docs/apache-airflow/core-concepts/executor/kubernetes.rst
     - docs/apache-airflow/core-concepts/executor/celery_kubernetes.rst
@@ -174,9 +171,6 @@ labelPRBasedOnFilePath:
     - kubernetes_tests/**/*
     - tests/providers/cncf/kubernetes/**/*
     - tests/system/providers/cncf/kubernetes/**/*
-    - tests/kubernetes/**/*
-    - tests/executors/kubernetes_executor_template_files/**/*
-    - tests/executors/*kubernetes*.py
 
   area:API:
     - airflow/api/**/*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ff8085d030..d289ef97c4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -333,6 +333,22 @@ repos:
         pass_filenames: false
         entry: ./scripts/ci/pre_commit/pre_commit_check_order_setup.py
         additional_dependencies: ['rich>=12.4.4']
+      - id: check-airflow-k8s-not-used
+        name: Check airflow.kubernetes imports are not used
+        language: python
+        files: ^airflow/.*\.py$
+        require_serial: true
+        exclude: ^airflow/kubernetes/
+        entry: ./scripts/ci/pre_commit/pre_commit_check_airflow_k8s_not_used.py
+        additional_dependencies: ['rich>=12.4.4']
+      - id: check-cncf-k8s-only-for-executors
+        name: Check cncf.kubernetes imports used for executors only
+        language: python
+        files: ^airflow/.*\.py$
+        require_serial: true
+        exclude: ^airflow/kubernetes/|^airflow/providers/
+        entry: ./scripts/ci/pre_commit/pre_commit_check_cncf_k8s_used_for_k8s_executor_only.py
+        additional_dependencies: ['rich>=12.4.4']
       - id: check-extra-packages-references
         name: Checks setup extra packages
         description: Checks if all the libraries in setup.py are listed in extra-packages-ref.rst file
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 1729c8ba8e..bdb77e5c83 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -146,6 +146,8 @@ require Breeze Docker image to be built locally.
 +-----------------------------------------------------------+--------------------------------------------------------------+---------+
 | check-aiobotocore-optional                                | Check if aiobotocore is an optional dependency only          |         |
 +-----------------------------------------------------------+--------------------------------------------------------------+---------+
+| check-airflow-k8s-not-used                                | Check airflow.kubernetes imports are not used                |         |
++-----------------------------------------------------------+--------------------------------------------------------------+---------+
 | check-airflow-provider-compatibility                      | Check compatibility of Providers with Airflow                |         |
 +-----------------------------------------------------------+--------------------------------------------------------------+---------+
 | check-apache-license-rat                                  | Check if licenses are OK for Apache                          |         |
@@ -163,6 +165,8 @@ require Breeze Docker image to be built locally.
 +-----------------------------------------------------------+--------------------------------------------------------------+---------+
 | check-changelog-has-no-duplicates                         | Check changelogs for duplicate entries                       |         |
 +-----------------------------------------------------------+--------------------------------------------------------------+---------+
+| check-cncf-k8s-only-for-executors                         | Check cncf.kubernetes imports used for executors only        |         |
++-----------------------------------------------------------+--------------------------------------------------------------+---------+
 | check-core-deprecation-classes                            | Verify usage of Airflow deprecation classes in core          |         |
 +-----------------------------------------------------------+--------------------------------------------------------------+---------+
 | check-daysago-import-from-utils                           | Make sure days_ago is imported from airflow.utils.dates      |         |
diff --git a/airflow/cli/commands/kubernetes_command.py b/airflow/cli/commands/kubernetes_command.py
index 1555f7be92..056465577f 100644
--- a/airflow/cli/commands/kubernetes_command.py
+++ b/airflow/cli/commands/kubernetes_command.py
@@ -25,12 +25,12 @@ from kubernetes import client
 from kubernetes.client.api_client import ApiClient
 from kubernetes.client.rest import ApiException
 
-from airflow.executors.kubernetes_executor import KubeConfig
-from airflow.kubernetes import pod_generator
-from airflow.kubernetes.kube_client import get_kube_client
-from airflow.kubernetes.kubernetes_helper_functions import create_pod_id
-from airflow.kubernetes.pod_generator import PodGenerator
 from airflow.models import DagRun, TaskInstance
+from airflow.providers.cncf.kubernetes import pod_generator
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubeConfig
+from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_pod_id
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 from airflow.utils import cli as cli_utils, yaml
 from airflow.utils.cli import get_dag
 from airflow.utils.providers_configuration_loader import providers_configuration_loaded
diff --git a/airflow/config_templates/__init__.py b/airflow/config_templates/__init__.py
index 6dd06760cd..4bdf46b9a9 100644
--- a/airflow/config_templates/__init__.py
+++ b/airflow/config_templates/__init__.py
@@ -25,4 +25,4 @@ __deprecated_classes = {
     },
 }
 
-add_deprecated_classes(__deprecated_classes, __name__)
+add_deprecated_classes(__deprecated_classes, __name__, {}, "The `celery` provider must be >= 3.3.0 for that.")
diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml
index 35f16c57a8..b550c25932 100644
--- a/airflow/config_templates/config.yml
+++ b/airflow/config_templates/config.yml
@@ -1943,21 +1943,6 @@ sentry:
       type: string
       example: ~
       default: ~
-local_kubernetes_executor:
-  description: |
-    This section only applies if you are using the ``LocalKubernetesExecutor`` in
-    ``[core]`` section above
-  options:
-    kubernetes_queue:
-      description: |
-        Define when to send a task to ``KubernetesExecutor`` when using ``LocalKubernetesExecutor``.
-        When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``),
-        the task is executed via ``KubernetesExecutor``,
-        otherwise via ``LocalExecutor``
-      version_added: 2.3.0
-      type: string
-      example: ~
-      default: "kubernetes"
 scheduler:
   description: ~
   options:
@@ -2442,201 +2427,6 @@ elasticsearch_configs:
       type: string
       example: ~
       default: "True"
-kubernetes_executor:
-  description: ~
-  renamed:
-    previous_name: kubernetes
-    version: 2.5.0
-  options:
-    api_client_retry_configuration:
-      description: |
-        Kwargs to override the default urllib3 Retry used in the kubernetes API client
-      version_added: 2.6.0
-      type: string
-      example: '{ "total": 3, "backoff_factor": 0.5 }'
-      default: ""
-    logs_task_metadata:
-      description: |
-        Flag to control the information added to kubernetes executor logs for better traceability
-      version_added: 2.7.0
-      type: boolean
-      example: ~
-      default: "False"
-    pod_template_file:
-      description: |
-        Path to the YAML pod file that forms the basis for KubernetesExecutor workers.
-      version_added: 1.10.11
-      type: string
-      example: ~
-      default: ""
-      see_also: ":ref:`concepts:pod_template_file`"
-    worker_container_repository:
-      description: |
-        The repository of the Kubernetes Image for the Worker to Run
-      version_added: ~
-      type: string
-      example: ~
-      default: ""
-    worker_container_tag:
-      description: |
-        The tag of the Kubernetes Image for the Worker to Run
-      version_added: ~
-      type: string
-      example: ~
-      default: ""
-    namespace:
-      description: |
-        The Kubernetes namespace where airflow workers should be created. Defaults to ``default``
-      version_added: ~
-      type: string
-      example: ~
-      default: "default"
-    delete_worker_pods:
-      description: |
-        If True, all worker pods will be deleted upon termination
-      version_added: ~
-      type: string
-      example: ~
-      default: "True"
-    delete_worker_pods_on_failure:
-      description: |
-        If False (and delete_worker_pods is True),
-        failed worker pods will not be deleted so users can investigate them.
-        This only prevents removal of worker pods where the worker itself failed,
-        not when the task it ran failed.
-      version_added: 1.10.11
-      type: string
-      example: ~
-      default: "False"
-    worker_pods_creation_batch_size:
-      description: |
-        Number of Kubernetes Worker Pod creation calls per scheduler loop.
-        Note that the current default of "1" will only launch a single pod
-        per-heartbeat. It is HIGHLY recommended that users increase this
-        number to match the tolerance of their kubernetes cluster for
-        better performance.
-      version_added: 1.10.3
-      type: string
-      example: ~
-      default: "1"
-    multi_namespace_mode:
-      description: |
-        Allows users to launch pods in multiple namespaces.
-        Will require creating a cluster-role for the scheduler,
-        or use multi_namespace_mode_namespace_list configuration.
-      version_added: 1.10.12
-      type: boolean
-      example: ~
-      default: "False"
-    multi_namespace_mode_namespace_list:
-      description: |
-        If multi_namespace_mode is True while scheduler does not have a cluster-role,
-        give the list of namespaces where the scheduler will schedule jobs
-        Scheduler needs to have the necessary permissions in these namespaces.
-      version_added: 2.6.0
-      type: string
-      example: ~
-      default: ""
-    in_cluster:
-      description: |
-        Use the service account kubernetes gives to pods to connect to kubernetes cluster.
-        It's intended for clients that expect to be running inside a pod running on kubernetes.
-        It will raise an exception if called from a process not running in a kubernetes environment.
-      version_added: ~
-      type: string
-      example: ~
-      default: "True"
-    cluster_context:
-      description: |
-        When running with in_cluster=False change the default cluster_context or config_file
-        options to Kubernetes client. Leave blank these to use default behaviour like ``kubectl`` has.
-      version_added: 1.10.3
-      type: string
-      example: ~
-      default: ~
-    config_file:
-      description: |
-        Path to the kubernetes configfile to be used when ``in_cluster`` is set to False
-      version_added: 1.10.3
-      type: string
-      example: ~
-      default: ~
-    kube_client_request_args:
-      description: |
-        Keyword parameters to pass while calling a kubernetes client core_v1_api methods
-        from Kubernetes Executor provided as a single line formatted JSON dictionary string.
-        List of supported params are similar for all core_v1_apis, hence a single config
-        variable for all apis. See:
-        https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py
-      version_added: 1.10.4
-      type: string
-      example: ~
-      default: ""
-    delete_option_kwargs:
-      description: |
-        Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client
-        ``core_v1_api`` method when using the Kubernetes Executor.
-        This should be an object and can contain any of the options listed in the ``v1DeleteOptions``
-        class defined here:
-        https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19
-      version_added: 1.10.12
-      type: string
-      example: '{"grace_period_seconds": 10}'
-      default: ""
-    enable_tcp_keepalive:
-      description: |
-        Enables TCP keepalive mechanism. This prevents Kubernetes API requests to hang indefinitely
-        when idle connection is time-outed on services like cloud load balancers or firewalls.
-      version_added: 2.0.0
-      type: boolean
-      example: ~
-      default: "True"
-    tcp_keep_idle:
-      description: |
-        When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has
-        been idle for `tcp_keep_idle` seconds.
-      version_added: 2.0.0
-      type: integer
-      example: ~
-      default: "120"
-    tcp_keep_intvl:
-      description: |
-        When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond
-        to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds.
-      version_added: 2.0.0
-      type: integer
-      example: ~
-      default: "30"
-    tcp_keep_cnt:
-      description: |
-        When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond
-        to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt number` of times before
-        a connection is considered to be broken.
-      version_added: 2.0.0
-      type: integer
-      example: ~
-      default: "6"
-    verify_ssl:
-      description: |
-        Set this to false to skip verifying SSL certificate of Kubernetes python client.
-      version_added: 2.1.0
-      type: boolean
-      example: ~
-      default: "True"
-    worker_pods_queued_check_interval:
-      description: |
-        How often in seconds to check for task instances stuck in "queued" status without a pod
-      version_added: 2.2.0
-      type: integer
-      example: ~
-      default: "60"
-    ssl_ca_cert:
-      description: |
-        Path to a CA certificate to be used by the Kubernetes client to verify the server's SSL certificate.
-      version_added: 2.6.0
-      type: string
-      example: ~
-      default: ""
 sensors:
   description: ~
   options:
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 54ce39225d..afbb021834 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -46,6 +46,7 @@ from airflow.auth.managers.base_auth_manager import BaseAuthManager
 from airflow.exceptions import AirflowConfigException
 from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend
 from airflow.utils import yaml
+from airflow.utils.empty_set import _get_empty_set_for_configuration
 from airflow.utils.module_loading import import_string
 from airflow.utils.providers_configuration_loader import providers_configuration_loaded
 from airflow.utils.weight_rule import WeightRule
@@ -66,8 +67,6 @@ ConfigSourcesType = Dict[str, ConfigSectionSourcesType]
 
 ENV_VAR_PREFIX = "AIRFLOW__"
 
-EMPTY_SET: Set[tuple[str, str]] = set()  # noqa: UP006
-
 
 def _parse_sqlite_version(s: str) -> tuple[int, ...]:
     match = _SQLITE3_VERSION_PATTERN.match(s)
@@ -299,7 +298,9 @@ class AirflowConfigParser(ConfigParser):
     @functools.cached_property
     def sensitive_config_values(self) -> Set[tuple[str, str]]:  # noqa: UP006
         if self.configuration_description is None:
-            return EMPTY_SET.copy()  # we can't use set() here because set is defined below # ¯\_(ツ)_/¯
+            return (
+                _get_empty_set_for_configuration()
+            )  # we can't use set() here because set is defined below # ¯\_(ツ)_/¯
         flattened = {
             (s, k): item
             for s, s_c in self.configuration_description.items()
@@ -2313,6 +2314,6 @@ SECRET_KEY = b64encode(os.urandom(16)).decode("utf-8")
 FERNET_KEY = ""  # Set only if needed when generating a new file
 WEBSERVER_CONFIG = ""  # Set by initialize_config
 
-conf = initialize_config()
+conf: AirflowConfigParser = initialize_config()
 secrets_backend_list = initialize_secrets_backends()
 conf.validate()
diff --git a/airflow/decorators/__init__.pyi b/airflow/decorators/__init__.pyi
index 92fa4dda5b..7b2ac1c9ce 100644
--- a/airflow/decorators/__init__.pyi
+++ b/airflow/decorators/__init__.pyi
@@ -33,8 +33,8 @@ from airflow.decorators.python_virtualenv import virtualenv_task
 from airflow.decorators.sensor import sensor_task
 from airflow.decorators.short_circuit import short_circuit_task
 from airflow.decorators.task_group import task_group
-from airflow.kubernetes.secret import Secret
 from airflow.models.dag import dag
+from airflow.providers.cncf.kubernetes.secret import Secret
 
 # Please keep this in sync with __init__.py's __all__.
 __all__ = [
diff --git a/airflow/example_dags/example_kubernetes_executor.py b/airflow/example_dags/example_kubernetes_executor.py
index b9e6bdba35..a3d6570ac8 100644
--- a/airflow/example_dags/example_kubernetes_executor.py
+++ b/airflow/example_dags/example_kubernetes_executor.py
@@ -32,9 +32,6 @@ from airflow.example_dags.libs.helper import print_stuff
 
 log = logging.getLogger(__name__)
 
-worker_container_repository = conf.get("kubernetes_executor", "worker_container_repository")
-worker_container_tag = conf.get("kubernetes_executor", "worker_container_tag")
-
 try:
     from kubernetes.client import models as k8s
 except ImportError:
@@ -163,6 +160,8 @@ if k8s:
             print_stuff()
 
         other_ns_task = other_namespace_task()
+        worker_container_repository = conf.get("kubernetes_executor", "worker_container_repository")
+        worker_container_tag = conf.get("kubernetes_executor", "worker_container_tag")
 
         # You can also change the base image, here we used the worker image for demonstration.
         # Note that the image must have the same configuration as the
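
The example DAG change above moves the two conf.get() calls from module level into the
DAG body, so the kubernetes_executor options (now supplied by the cncf.kubernetes
provider's configuration) are only read when the executor-specific tasks are actually
built. A rough sketch of the same pattern, with an illustrative helper name that is not
part of the commit:

    from airflow.configuration import conf

    def worker_image() -> str:
        # Read provider-backed configuration lazily instead of at import time;
        # the kubernetes_executor section is registered by the cncf.kubernetes provider.
        repository = conf.get("kubernetes_executor", "worker_container_repository")
        tag = conf.get("kubernetes_executor", "worker_container_tag")
        return f"{repository}:{tag}"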
diff --git a/airflow/exceptions.py b/airflow/exceptions.py
index 8c65a1f66f..ea162fe8db 100644
--- a/airflow/exceptions.py
+++ b/airflow/exceptions.py
@@ -375,12 +375,28 @@ class TaskDeferralError(AirflowException):
     """Raised when a task failed during deferral for some reason."""
 
 
-class PodMutationHookException(AirflowException):
-    """Raised when exception happens during Pod Mutation Hook execution."""
-
-
-class PodReconciliationError(AirflowException):
-    """Raised when an error is encountered while trying to merge pod configs."""
+# The try/except handling is needed after we moved all k8s classes to cncf.kubernetes provider
+# These two exceptions are used internally by Kubernetes Executor but also by PodGenerator, so we need
+# to leave them here in case older version of cncf.kubernetes provider is used to run KubernetesPodOperator
+# and it raises one of those exceptions. The code should be backwards compatible even if you import
+# and try/except the exception using direct imports from airflow.exceptions.
+# 1) if you have old provider, both provider and pod generator will throw the "airflow.exceptions" exception.
+# 2) if you have new provider, both provider and pod generator will throw the
+#    "airflow.providers.cncf.kubernetes" as it will be imported here from the provider.
+try:
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import PodMutationHookException
+except ImportError:
+
+    class PodMutationHookException(AirflowException):  # type: ignore[no-redef]
+        """Raised when exception happens during Pod Mutation Hook execution."""
+
+
+try:
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import PodReconciliationError
+except ImportError:
+
+    class PodReconciliationError(AirflowException):  # type: ignore[no-redef]
+        """Raised when an error is encountered while trying to merge pod configs."""
 
 
 class RemovedInAirflow3Warning(DeprecationWarning):
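
The fallback import above means user code can keep catching these exceptions through
airflow.exceptions regardless of which provider version raised them: with a new provider
the names re-exported here are the provider's own classes, with an old provider they are
the local AirflowException subclasses. A small sketch of the compatible pattern, where
build_pod is only an illustrative callable standing in for whatever merges the pod
configs:

    from airflow.exceptions import PodReconciliationError

    def safe_build_pod(build_pod):
        # Catching via airflow.exceptions works with both old and new
        # cncf.kubernetes providers, because the name re-exported here is the
        # provider's class whenever a new enough provider is installed.
        try:
            return build_pod()
        except PodReconciliationError as err:
            raise RuntimeError(f"Failed to merge pod configs: {err}") from err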
diff --git a/airflow/executors/__init__.py b/airflow/executors/__init__.py
index ceeec52339..f6e18b20a8 100644
--- a/airflow/executors/__init__.py
+++ b/airflow/executors/__init__.py
@@ -31,6 +31,34 @@ __deprecated_classes = {
     "dask_executor": {
         "DaskExecutor": "airflow.providers.daskexecutor.executors.dask_executor.DaskExecutor",
     },
+    "kubernetes_executor": {
+        "KubernetesExecutor": "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor.KubernetesExecutor",
+    },
+    "kubernetes_executor_types": {
+        "ALL_NAMESPACES": "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor_types.ALL_NAMESPACES",
+        "POD_EXECUTOR_DONE_KEY": "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor_types.POD_EXECUTOR_DONE_KEY",
+    },
+    "kubernetes_executor_utils": {
+        "AirflowKubernetesScheduler": "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor_utils.AirflowKubernetesScheduler",
+        "KubernetesJobWatcher": "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor_utils.KubernetesJobWatcher",
+        "ResourceVersion": "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor_utils.ResourceVersion",
+    },
+    "local_kubernetes_executor": {
+        "LocalKubernetesExecutor": "airflow.providers.cncf.kubernetes.executors.LocalKubernetesExecutor",
+    },
 }
 
-add_deprecated_classes(__deprecated_classes, __name__)
+add_deprecated_classes(
+    __deprecated_classes,
+    __name__,
+    {},
+    "For Celery executors, the `celery` provider should be >= 3.3.0. "
+    "For Kubernetes executors, the `cncf.kubernetes` provider should be >= 7.4.0 for that. "
+    "For Dask executors, any version of `daskexecutor` provider is needed.",
+)
diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py
index 3553a71183..ca21bdf05a 100644
--- a/airflow/executors/executor_loader.py
+++ b/airflow/executors/executor_loader.py
@@ -58,13 +58,15 @@ class ExecutorLoader:
     _default_executor: BaseExecutor | None = None
     executors = {
         LOCAL_EXECUTOR: "airflow.executors.local_executor.LocalExecutor",
-        LOCAL_KUBERNETES_EXECUTOR: "airflow.executors.local_kubernetes_executor.LocalKubernetesExecutor",
+        LOCAL_KUBERNETES_EXECUTOR: "airflow.providers.cncf.kubernetes."
+        "executors.local_kubernetes_executor.LocalKubernetesExecutor",
         SEQUENTIAL_EXECUTOR: "airflow.executors.sequential_executor.SequentialExecutor",
         CELERY_EXECUTOR: "airflow.providers.celery.executors.celery_executor.CeleryExecutor",
         CELERY_KUBERNETES_EXECUTOR: "airflow.providers.celery."
         "executors.celery_kubernetes_executor.CeleryKubernetesExecutor",
         DASK_EXECUTOR: "airflow.providers.daskexecutor.executors.dask_executor.DaskExecutor",
-        KUBERNETES_EXECUTOR: "airflow.executors.kubernetes_executor.KubernetesExecutor",
+        KUBERNETES_EXECUTOR: "airflow.providers.cncf.kubernetes."
+        "executors.kubernetes_executor.KubernetesExecutor",
         DEBUG_EXECUTOR: "airflow.executors.debug_executor.DebugExecutor",
     }
 
@@ -98,9 +100,6 @@ class ExecutorLoader:
 
         :return: an instance of executor class via executor_name
         """
-        from airflow.providers_manager import ProvidersManager
-
-        ProvidersManager().initialize_providers_configuration()
         if executor_name == CELERY_KUBERNETES_EXECUTOR:
             return cls.__load_celery_kubernetes_executor()
         elif executor_name == LOCAL_KUBERNETES_EXECUTOR:
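
The executor aliases themselves do not change; only the dotted paths they resolve to now
live in the provider packages, so executor = KubernetesExecutor in airflow.cfg keeps
working as long as cncf.kubernetes 7.4.0+ is installed. A small illustration, assuming
ExecutorLoader.load_executor keeps its existing public signature:

    from airflow.executors.executor_loader import ExecutorLoader

    # "KubernetesExecutor" now resolves to
    # airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor
    # (requires apache-airflow-providers-cncf-kubernetes>=7.4.0).
    executor = ExecutorLoader.load_executor("KubernetesExecutor")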
diff --git a/airflow/kubernetes/__init__.py b/airflow/kubernetes/__init__.py
index 13a83393a9..cf4e9a9591 100644
--- a/airflow/kubernetes/__init__.py
+++ b/airflow/kubernetes/__init__.py
@@ -14,3 +14,128 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
+from airflow.utils.deprecation_tools import add_deprecated_classes
+
+__deprecated_classes: dict[str, dict[str, str]] = {
+    "kubernetes_helper_functions": {
+        "add_pod_suffix": "airflow.providers.cncf.kubernetes.kubernetes_helper_functions.add_pod_suffix.",
+        "annotations_for_logging_task_metadata": "airflow.providers.cncf.kubernetes."
+        "kubernetes_helper_functions."
+        "annotations_for_logging_task_metadata.",
+        "annotations_to_key": "airflow.providers.cncf.kubernetes."
+        "kubernetes_helper_functions.annotations_to_key",
+        "create_pod_id": "airflow.providers.cncf.kubernetes.kubernetes_helper_functions.create_pod_id",
+        "get_logs_task_metadata": "airflow.providers.cncf.kubernetes."
+        "kubernetes_helper_functions.get_logs_task_metadata",
+        "rand_str": "airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str",
+    },
+    "pod": {
+        "Port": "airflow.providers.cncf.kubernetes.backcompat.pod.Port",
+        "Resources": "airflow.providers.cncf.kubernetes.backcompat.pod.Resources",
+    },
+    "pod_launcher": {
+        "PodLauncher": "airflow.providers.cncf.kubernetes.pod_launcher.PodLauncher",
+        "PodStatus": "airflow.providers.cncf.kubernetes.pod_launcher.PodStatus",
+    },
+    "pod_launcher_deprecated": {
+        "PodLauncher": "airflow.providers.cncf.kubernetes.pod_launcher_deprecated.PodLauncher",
+        "PodStatus": "airflow.providers.cncf.kubernetes.pod_launcher_deprecated.PodStatus",
+    },
+    "pod_runtime_info_env": {
+        "PodRuntimeInfoEnv": "airflow.providers.cncf.kubernetes.backcompat."
+        "pod_runtime_info_env.PodRuntimeInfoEnv",
+    },
+    "volume": {
+        "Volume": "airflow.providers.cncf.kubernetes.backcompat.volume.Volume",
+    },
+    "volume_mount": {
+        "VolumeMount": "airflow.providers.cncf.kubernetes.backcompat.volume_mount.VolumeMount",
+    },
+    # the below classes are not served from provider but from internal pre_7_4_0_compatibility package
+    "k8s_model": {
+        "K8SModel": "airflow.kubernetes.pre_7_4_0_compatibility.k8s_model.K8SModel",
+        "append_to_pod": "airflow.kubernetes.pre_7_4_0_compatibility.k8s_model.append_to_pod",
+    },
+    "kube_client": {
+        "_disable_verify_ssl": "airflow.kubernetes.pre_7_4_0_compatibility.kube_client._disable_verify_ssl",
+        "_enable_tcp_keepalive": "airflow.kubernetes.pre_7_4_0_compatibility.kube_client."
+        "_enable_tcp_keepalive",
+        "get_kube_client": "airflow.kubernetes.pre_7_4_0_compatibility.kube_client.get_kube_client",
+    },
+    "pod_generator": {
+        "datetime_to_label_safe_datestring": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator"
+        ".datetime_to_label_safe_datestring",
+        "extend_object_field": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator."
+        "extend_object_field",
+        "label_safe_datestring_to_datetime": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator."
+        "label_safe_datestring_to_datetime",
+        "make_safe_label_value": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator."
+        "make_safe_label_value",
+        "merge_objects": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator.merge_objects",
+        "PodGenerator": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator.PodGenerator",
+    },
+    "pod_generator_deprecated": {
+        "make_safe_label_value": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator_deprecated."
+        "make_safe_label_value",
+        "PodDefaults": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator_deprecated.PodDefaults",
+        "PodGenerator": "airflow.kubernetes.pre_7_4_0_compatibility.pod_generator_deprecated.PodGenerator",
+    },
+    "secret": {
+        "Secret": "airflow.kubernetes.pre_7_4_0_compatibility.secret.Secret",
+    },
+}
+
+__override_deprecated_names: dict[str, dict[str, str]] = {
+    "pod": {
+        "Port": "kubernetes.client.models.V1ContainerPort",
+        "Resources": "kubernetes.client.models.V1ResourceRequirements",
+    },
+    "pod_runtime_info_env": {
+        "PodRuntimeInfoEnv": "kubernetes.client.models.V1EnvVar",
+    },
+    "volume": {
+        "Volume": "kubernetes.client.models.V1Volume",
+    },
+    "volume_mount": {
+        "VolumeMount": "kubernetes.client.models.V1VolumeMount",
+    },
+    "k8s_model": {
+        "K8SModel": "airflow.airflow.providers.cncf.kubernetes.k8s_model.K8SModel",
+        "append_to_pod": "airflow.airflow.providers.cncf.kubernetes.k8s_model.append_to_pod",
+    },
+    "kube_client": {
+        "_disable_verify_ssl": "airflow.kubernetes.airflow.providers.cncf.kubernetes."
+        "kube_client._disable_verify_ssl",
+        "_enable_tcp_keepalive": "airflow.kubernetes.airflow.providers.cncf.kubernetes.kube_client."
+        "_enable_tcp_keepalive",
+        "get_kube_client": "airflow.kubernetes.airflow.providers.cncf.kubernetes.kube_client.get_kube_client",
+    },
+    "pod_generator": {
+        "datetime_to_label_safe_datestring": "airflow.providers.cncf.kubernetes.pod_generator"
+        ".datetime_to_label_safe_datestring",
+        "extend_object_field": "airflow.kubernetes.airflow.providers.cncf.kubernetes.pod_generator."
+        "extend_object_field",
+        "label_safe_datestring_to_datetime": "airflow.providers.cncf.kubernetes.pod_generator."
+        "label_safe_datestring_to_datetime",
+        "make_safe_label_value": "airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value",
+        "merge_objects": "airflow.providers.cncf.kubernetes.pod_generator.merge_objects",
+        "PodGenerator": "airflow.providers.cncf.kubernetes.pod_generator.PodGenerator",
+    },
+    "pod_generator_deprecated": {
+        "make_safe_label_value": "airflow.providers.cncf.kubernetes.pod_generator_deprecated."
+        "make_safe_label_value",
+        "PodDefaults": "airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodDefaults",
+        "PodGenerator": "airflow.providers.cncf.kubernetes.pod_generator_deprecated.PodGenerator",
+    },
+    "secret": {
+        "Secret": "airflow.providers.cncf.kubernetes.secret.Secret",
+    },
+}
+add_deprecated_classes(
+    __deprecated_classes,
+    __name__,
+    __override_deprecated_names,
+    "The `cncf.kubernetes` provider must be >= 7.4.0 for that.",
+)
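
add_deprecated_classes (extended by this commit to accept override hints and an extra
version message) installs the import redirection lazily, so the provider module is only
imported when a deprecated name is actually accessed. The real helper lives in
airflow/utils/deprecation_tools.py; the following is only a simplified sketch of the
idea, not the actual implementation:

    import importlib
    import warnings

    def _make_module_getattr(redirects, override_hints, extra_message):
        # redirects maps an old attribute name to its fully qualified new location;
        # override_hints optionally replaces the location shown in the warning text.
        def __getattr__(name):
            try:
                target = redirects[name]
            except KeyError:
                raise AttributeError(name) from None
            hint = override_hints.get(name, target)
            warnings.warn(
                f"`{name}` is deprecated. Please use `{hint}`. {extra_message}",
                DeprecationWarning,
                stacklevel=2,
            )
            module_path, _, attribute = target.rpartition(".")
            return getattr(importlib.import_module(module_path), attribute)

        return __getattr__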
diff --git a/airflow/kubernetes/pod_runtime_info_env.py b/airflow/kubernetes/pre_7_4_0_compatibility/__init__.py
similarity index 57%
rename from airflow/kubernetes/pod_runtime_info_env.py
rename to airflow/kubernetes/pre_7_4_0_compatibility/__init__.py
index 32e178263b..18c84b6d03 100644
--- a/airflow/kubernetes/pod_runtime_info_env.py
+++ b/airflow/kubernetes/pre_7_4_0_compatibility/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,19 +14,18 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use :mod:`kubernetes.client.models.V1EnvVar`."""
 from __future__ import annotations
 
+# All the classes in this module should only be kept for backwards-compatibility reasons.
+# old cncf.kubernetes providers will use those in their frozen version for pre-7.4.0 release
 import warnings
 
-from airflow.exceptions import RemovedInAirflow3Warning
-
-with warnings.catch_warnings():
-    warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.pod_runtime_info_env import PodRuntimeInfoEnv  # noqa
-
 warnings.warn(
-    "This module is deprecated. Please use `kubernetes.client.models.V1EnvVar`.",
-    RemovedInAirflow3Warning,
+    "This module is deprecated. The `cncf.kubernetes` provider before version 7.4.0 uses this module - "
+    "you should migrate to a newer version of `cncf.kubernetes` to get rid of this warning. If you "
+    "import the module via `airflow.kubernetes` import, please use `cncf.kubernetes' "
+    "provider 7.4.0+ and switch all your imports to use `apache.airflow.providers.cncf.kubernetes` "
+    "to get rid of the warning.",
+    DeprecationWarning,
     stacklevel=2,
 )
diff --git a/airflow/kubernetes/k8s_model.py b/airflow/kubernetes/pre_7_4_0_compatibility/k8s_model.py
similarity index 100%
copy from airflow/kubernetes/k8s_model.py
copy to airflow/kubernetes/pre_7_4_0_compatibility/k8s_model.py
diff --git a/airflow/kubernetes/kube_client.py b/airflow/kubernetes/pre_7_4_0_compatibility/kube_client.py
similarity index 100%
copy from airflow/kubernetes/kube_client.py
copy to airflow/kubernetes/pre_7_4_0_compatibility/kube_client.py
diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py
similarity index 95%
copy from airflow/kubernetes/pod_generator.py
copy to airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py
index 844a1abfd8..aaacc8ce45 100644
--- a/airflow/kubernetes/pod_generator.py
+++ b/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator.py
@@ -15,7 +15,8 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-Pod generator.
+Pod generator compatible with cncf-providers released before 2.7.0 of airflow (so pre-7.4.0 of
+the cncf.kubernetes provider).
 
 This module provides an interface between the previous Pod
 API and outputs a kubernetes.client.models.V1Pod.
@@ -28,6 +29,8 @@ import copy
 import datetime
 import logging
 import os
+import secrets
+import string
 import warnings
 from functools import reduce
 
@@ -42,8 +45,10 @@ from airflow.exceptions import (
     PodReconciliationError,
     RemovedInAirflow3Warning,
 )
-from airflow.kubernetes.kubernetes_helper_functions import add_pod_suffix, rand_str
-from airflow.kubernetes.pod_generator_deprecated import PodDefaults, PodGenerator as PodGeneratorDeprecated
+from airflow.kubernetes.pre_7_4_0_compatibility.pod_generator_deprecated import (
+    PodDefaults,
+    PodGenerator as PodGeneratorDeprecated,
+)
 from airflow.utils import yaml
 from airflow.utils.hashlib_wrapper import md5
 from airflow.version import version as airflow_version
@@ -52,6 +57,28 @@ log = logging.getLogger(__name__)
 
 MAX_LABEL_LEN = 63
 
+alphanum_lower = string.ascii_lowercase + string.digits
+
+
+def rand_str(num):
+    """Generate random lowercase alphanumeric string of length num.
+
+    :meta private:
+    """
+    return "".join(secrets.choice(alphanum_lower) for _ in range(num))
+
+
+def add_pod_suffix(pod_name: str, rand_len: int = 8, max_len: int = 80) -> str:
+    """Add random string to pod name while staying under max length.
+
+    :param pod_name: name of the pod
+    :param rand_len: length of the random string to append
+    :max_len: maximum length of the pod name
+    :meta private:
+    """
+    suffix = "-" + rand_str(rand_len)
+    return pod_name[: max_len - len(suffix)].strip("-.") + suffix
+
 
 def make_safe_label_value(string: str) -> str:
     """
diff --git a/airflow/kubernetes/pod_generator_deprecated.py b/airflow/kubernetes/pre_7_4_0_compatibility/pod_generator_deprecated.py
similarity index 100%
copy from airflow/kubernetes/pod_generator_deprecated.py
copy to airflow/kubernetes/pre_7_4_0_compatibility/pod_generator_deprecated.py
diff --git a/airflow/kubernetes/secret.py b/airflow/kubernetes/pre_7_4_0_compatibility/secret.py
similarity index 98%
copy from airflow/kubernetes/secret.py
copy to airflow/kubernetes/pre_7_4_0_compatibility/secret.py
index 32ce92e2de..14295f5c7a 100644
--- a/airflow/kubernetes/secret.py
+++ b/airflow/kubernetes/pre_7_4_0_compatibility/secret.py
@@ -23,7 +23,7 @@ import uuid
 from kubernetes.client import models as k8s
 
 from airflow.exceptions import AirflowConfigException
-from airflow.kubernetes.k8s_model import K8SModel
+from airflow.kubernetes.pre_7_4_0_compatibility.k8s_model import K8SModel
 
 
 class Secret(K8SModel):
diff --git a/airflow/kubernetes/volume.py b/airflow/kubernetes/volume.py
deleted file mode 100644
index ecb39e457f..0000000000
--- a/airflow/kubernetes/volume.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""This module is deprecated. Please use :mod:`kubernetes.client.models.V1Volume`."""
-from __future__ import annotations
-
-import warnings
-
-from airflow.exceptions import RemovedInAirflow3Warning
-
-with warnings.catch_warnings():
-    warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.volume import Volume  # noqa: autoflake
-
-warnings.warn(
-    "This module is deprecated. Please use `kubernetes.client.models.V1Volume`.",
-    RemovedInAirflow3Warning,
-    stacklevel=2,
-)
diff --git a/airflow/kubernetes/volume_mount.py b/airflow/kubernetes/volume_mount.py
deleted file mode 100644
index e65351d85f..0000000000
--- a/airflow/kubernetes/volume_mount.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""This module is deprecated. Please use :mod:`kubernetes.client.models.V1VolumeMount`."""
-from __future__ import annotations
-
-import warnings
-
-from airflow.exceptions import RemovedInAirflow3Warning
-
-with warnings.catch_warnings():
-    warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.volume_mount import VolumeMount  # noqa: autoflake
-
-warnings.warn(
-    "This module is deprecated. Please use `kubernetes.client.models.V1VolumeMount`.",
-    RemovedInAirflow3Warning,
-    stacklevel=2,
-)
diff --git a/airflow/models/base.py b/airflow/models/base.py
index 9965de7ec7..5f6b7e9893 100644
--- a/airflow/models/base.py
+++ b/airflow/models/base.py
@@ -74,7 +74,7 @@ def get_id_collation_args():
         return {}
 
 
-COLLATION_ARGS = get_id_collation_args()
+COLLATION_ARGS: dict[str, Any] = get_id_collation_args()
 
 
 def StringID(*, length=ID_LEN, **kwargs) -> String:
diff --git a/airflow/models/renderedtifields.py b/airflow/models/renderedtifields.py
index 4b2f80e47d..269af8276a 100644
--- a/airflow/models/renderedtifields.py
+++ b/airflow/models/renderedtifields.py
@@ -101,7 +101,11 @@ class RenderedTaskInstanceFields(Base):
             ti.render_templates()
         self.task = ti.task
         if os.environ.get("AIRFLOW_IS_K8S_EXECUTOR_POD", None):
-            self.k8s_pod_yaml = ti.render_k8s_pod_yaml()
+            # we can safely import it here from provider. In Airflow 2.7.0+ you need to have new version
+            # of kubernetes provider installed to reach this place
+            from airflow.providers.cncf.kubernetes.template_rendering import render_k8s_pod_yaml
+
+            self.k8s_pod_yaml = render_k8s_pod_yaml(ti)
         self.rendered_fields = {
             field: serialize_template_field(getattr(self.task, field)) for field in self.task.template_fields
         }
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index 520d07f092..6eafbab4a4 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -2251,19 +2251,6 @@ class TaskInstance(Base, LoggingMixin):
                 "rendering of template_fields."
             ) from e
 
-    @provide_session
-    def get_rendered_k8s_spec(self, session: Session = NEW_SESSION):
-        """Fetch rendered template fields from DB."""
-        from airflow.models.renderedtifields import RenderedTaskInstanceFields
-
-        rendered_k8s_spec = RenderedTaskInstanceFields.get_k8s_pod_yaml(self, session=session)
-        if not rendered_k8s_spec:
-            try:
-                rendered_k8s_spec = self.render_k8s_pod_yaml()
-            except (TemplateAssertionError, UndefinedError) as e:
-                raise AirflowException(f"Unable to render a k8s spec for this taskinstance: {e}") from e
-        return rendered_k8s_spec
-
     def overwrite_params_with_dag_run_conf(self, params, dag_run):
         """Overwrite Task Params with DagRun.conf."""
         if dag_run and dag_run.conf:
@@ -2290,32 +2277,51 @@ class TaskInstance(Base, LoggingMixin):
         return original_task
 
     def render_k8s_pod_yaml(self) -> dict | None:
-        """Render k8s pod yaml."""
-        from kubernetes.client.api_client import ApiClient
-
-        from airflow.kubernetes.kube_config import KubeConfig
-        from airflow.kubernetes.kubernetes_helper_functions import create_pod_id  # Circular import
-        from airflow.kubernetes.pod_generator import PodGenerator
+        """Render the k8s pod yaml."""
+        try:
+            from airflow.providers.cncf.kubernetes.template_rendering import (
+                render_k8s_pod_yaml as render_k8s_pod_yaml_from_provider,
+            )
+        except ImportError:
+            raise RuntimeError(
+                "You need to have the `cncf.kubernetes` provider installed to use this feature. "
+                "Also rather than calling it directly you should import "
+                "render_k8s_pod_yaml from airflow.providers.cncf.kubernetes.template_rendering "
+                "and call it with TaskInstance as the first argument."
+            )
+        warnings.warn(
+            "You should not call `task_instance.render_k8s_pod_yaml` directly. This method will be removed"
+            "in Airflow 3. Rather than calling it directly you should import "
+            "`render_k8s_pod_yaml` from `airflow.providers.cncf.kubernetes.template_rendering` "
+            "and call it with `TaskInstance` as the first argument.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return render_k8s_pod_yaml_from_provider(self)
 
-        kube_config = KubeConfig()
-        pod = PodGenerator.construct_pod(
-            dag_id=self.dag_id,
-            run_id=self.run_id,
-            task_id=self.task_id,
-            map_index=self.map_index,
-            date=None,
-            pod_id=create_pod_id(self.dag_id, self.task_id),
-            try_number=self.try_number,
-            kube_image=kube_config.kube_image,
-            args=self.command_as_list(),
-            pod_override_object=PodGenerator.from_obj(self.executor_config),
-            scheduler_job_id="0",
-            namespace=kube_config.executor_namespace,
-            base_worker_pod=PodGenerator.deserialize_model_file(kube_config.pod_template_file),
-            with_mutation_hook=True,
+    @provide_session
+    def get_rendered_k8s_spec(self, session: Session = NEW_SESSION):
+        """Render the k8s pod yaml."""
+        try:
+            from airflow.providers.cncf.kubernetes.template_rendering import (
+                get_rendered_k8s_spec as get_rendered_k8s_spec_from_provider,
+            )
+        except ImportError:
+            raise RuntimeError(
+                "You need to have the `cncf.kubernetes` provider installed to use this feature. "
+                "Also rather than calling it directly you should import "
+                "`get_rendered_k8s_spec` from `airflow.providers.cncf.kubernetes.template_rendering` "
+                "and call it with `TaskInstance` as the first argument."
+            )
+        warnings.warn(
+            "You should not call `task_instance.render_k8s_pod_yaml` directly. This method will be removed"
+            "in Airflow 3. Rather than calling it directly you should import "
+            "`get_rendered_k8s_spec` from `airflow.providers.cncf.kubernetes.template_rendering` "
+            "and call it with `TaskInstance` as the first argument.",
+            DeprecationWarning,
+            stacklevel=2,
         )
-        sanitized_pod = ApiClient().sanitize_for_serialization(pod)
-        return sanitized_pod
+        return get_rendered_k8s_spec_from_provider(self, session=session)
 
     def get_email_subject_content(
         self, exception: BaseException, task: BaseOperator | None = None
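
For callers that still use these ``TaskInstance`` methods, the migration described by the deprecation messages looks
roughly like the sketch below (illustrative only, not part of this commit; it assumes
``apache-airflow-providers-cncf-kubernetes>=7.4.0`` is installed and ``ti`` is a ``TaskInstance``):

    # Deprecated: emits a DeprecationWarning and needs the cncf.kubernetes provider anyway
    spec = ti.render_k8s_pod_yaml()

    # Preferred: import the helpers from the provider and pass the TaskInstance explicitly
    from airflow.providers.cncf.kubernetes.template_rendering import (
        get_rendered_k8s_spec,
        render_k8s_pod_yaml,
    )

    pod_yaml = render_k8s_pod_yaml(ti)
    rendered_spec = get_rendered_k8s_spec(ti)
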
diff --git a/airflow/providers/amazon/aws/notifications/chime.py b/airflow/providers/amazon/aws/notifications/chime.py
index c505b3e227..d62b4b21d6 100644
--- a/airflow/providers/amazon/aws/notifications/chime.py
+++ b/airflow/providers/amazon/aws/notifications/chime.py
@@ -53,7 +53,7 @@ class ChimeNotifier(BaseNotifier):
         """To reduce overhead cache the hook for the notifier."""
         return ChimeWebhookHook(chime_conn_id=self.chime_conn_id)
 
-    def notify(self, context: Context) -> None:
+    def notify(self, context: Context) -> None:  # type: ignore[override]
         """Send a message to a Chime Chat Room."""
         self.hook.send_message(message=self.message)
 
diff --git a/airflow/providers/apache/spark/CHANGELOG.rst b/airflow/providers/apache/spark/CHANGELOG.rst
index 8129b380a3..c855333665 100644
--- a/airflow/providers/apache/spark/CHANGELOG.rst
+++ b/airflow/providers/apache/spark/CHANGELOG.rst
@@ -29,6 +29,25 @@
 Changelog
 ---------
 
+4.1.2
+.....
+
+.. note::
+
+    The provider now expects ``apache-airflow-providers-cncf-kubernetes`` in version 7.4.0+ to be installed
+    in order to run Spark on Kubernetes jobs. You can install the provider with the ``cncf.kubernetes`` extra via
+    ``pip install apache-airflow-providers-apache-spark[cncf.kubernetes]`` to get the right version of the
+    ``cncf.kubernetes`` provider installed.
+
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Prepare docs for July 2023 wave of Providers (RC2) (#32381)``
+   * ``Remove spurious headers for provider changelogs (#32373)``
+   * ``Prepare docs for July 2023 wave of Providers (#32298)``
+   * ``D205 Support - Providers: Apache to Common (inclusive) (#32226)``
+   * ``Improve provider documentation and README structure (#32125)``
+
 4.1.1
 .....
 
diff --git a/airflow/providers/apache/spark/__init__.py b/airflow/providers/apache/spark/__init__.py
index 888104a68b..298c8a54f9 100644
--- a/airflow/providers/apache/spark/__init__.py
+++ b/airflow/providers/apache/spark/__init__.py
@@ -28,7 +28,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "4.1.1"
+__version__ = "4.1.2"
 
 try:
     from airflow import __version__ as airflow_version
diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py
index 5687df8ac8..22fd6d1436 100644
--- a/airflow/providers/apache/spark/hooks/spark_submit.py
+++ b/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -31,7 +31,7 @@ from airflow.security.kerberos import renew_from_kt
 from airflow.utils.log.logging_mixin import LoggingMixin
 
 with contextlib.suppress(ImportError, NameError):
-    from airflow.kubernetes import kube_client
+    from airflow.providers.cncf.kubernetes import kube_client
 
 ALLOWED_SPARK_BINARIES = ["spark-submit", "spark2-submit", "spark3-submit"]
 
diff --git a/airflow/providers/apache/spark/provider.yaml b/airflow/providers/apache/spark/provider.yaml
index 134e7b709e..30878d4913 100644
--- a/airflow/providers/apache/spark/provider.yaml
+++ b/airflow/providers/apache/spark/provider.yaml
@@ -23,6 +23,7 @@ description: |
 
 suspended: false
 versions:
+  - 4.1.2
   - 4.1.1
   - 4.1.0
   - 4.0.1
@@ -76,3 +77,8 @@ connection-types:
     connection-type: spark_sql
   - hook-class-name: airflow.providers.apache.spark.hooks.spark_submit.SparkSubmitHook
     connection-type: spark
+
+additional-extras:
+  - name: cncf.kubernetes
+    dependencies:
+      - apache-airflow-providers-cncf-kubernetes>=7.4.0
diff --git a/airflow/providers/celery/CHANGELOG.rst b/airflow/providers/celery/CHANGELOG.rst
index 0d0038cc36..807f7db9ea 100644
--- a/airflow/providers/celery/CHANGELOG.rst
+++ b/airflow/providers/celery/CHANGELOG.rst
@@ -33,7 +33,22 @@ Changelog
 .. note::
   This provider release is the first release that has Celery Executor and
   Celery Kubernetes Executor moved from the core ``apache-airflow`` package to a Celery
-  provider package.
+  provider package. It also expects ``apache-airflow-providers-cncf-kubernetes`` in version 7.4.0+ to be installed
+  in order to use ``CeleryKubernetesExecutor``. You can install the provider with the ``cncf.kubernetes`` extra
+  via ``pip install apache-airflow-providers-celery[cncf.kubernetes]`` to get the right version of the
+  ``cncf.kubernetes`` provider installed.
+
+.. Review and move the new changes to one of the sections above:
+   * ``Introduce decorator to load providers configuration (#32765)``
+   * ``Allow configuration to be contributed by providers (#32604)``
+   * ``Move default_celery.py to inside the provider (#32628)``
+   * ``Add Executors discovery and documentation (#32532)``
+   * ``Move CeleryExecutor to the celery provider (#32526)``
+   * ``Prepare docs for July 2023 wave of Providers (RC2) (#32381)``
+   * ``Remove spurious headers for provider changelogs (#32373)``
+   * ``Prepare docs for July 2023 wave of Providers (#32298)``
+   * ``D205 Support - Providers: Apache to Common (inclusive) (#32226)``
+   * ``Improve provider documentation and README structure (#32125)``
 
 3.2.1
 .....
diff --git a/airflow/providers/celery/__init__.py b/airflow/providers/celery/__init__.py
index 80c2021e93..1ff2700b3b 100644
--- a/airflow/providers/celery/__init__.py
+++ b/airflow/providers/celery/__init__.py
@@ -28,7 +28,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "3.2.1"
+__version__ = "3.3.0"
 
 try:
     from airflow import __version__ as airflow_version
diff --git a/airflow/providers/celery/executors/celery_kubernetes_executor.py b/airflow/providers/celery/executors/celery_kubernetes_executor.py
index d79b3cd1db..c5bbaac081 100644
--- a/airflow/providers/celery/executors/celery_kubernetes_executor.py
+++ b/airflow/providers/celery/executors/celery_kubernetes_executor.py
@@ -23,8 +23,15 @@ from typing import TYPE_CHECKING, Sequence
 from airflow.callbacks.base_callback_sink import BaseCallbackSink
 from airflow.callbacks.callback_requests import CallbackRequest
 from airflow.configuration import conf
-from airflow.executors.kubernetes_executor import KubernetesExecutor
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
+
+try:
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
+except ImportError as e:
+    from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+    raise AirflowOptionalProviderFeatureException(e)
+
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.providers_configuration_loader import providers_configuration_loaded
 
diff --git a/airflow/providers/celery/provider.yaml b/airflow/providers/celery/provider.yaml
index 26787d4694..0eccce6890 100644
--- a/airflow/providers/celery/provider.yaml
+++ b/airflow/providers/celery/provider.yaml
@@ -308,8 +308,13 @@ config:
           type is string, it is required to pass a string that conforms to the dictionary format.
           See:
           https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/redis.html#configuration
-        version_added: 3.3.0
+        version_added: ~
         type: string
         sensitive: true
         example: '{"password": "password_for_redis_server"}'
         default: ~
+
+additional-extras:
+  - name: cncf.kubernetes
+    dependencies:
+      - apache-airflow-providers-cncf-kubernetes>=7.4.0
diff --git a/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/airflow/providers/cncf/kubernetes/CHANGELOG.rst
index a356a6c6dc..92ab38bdd7 100644
--- a/airflow/providers/cncf/kubernetes/CHANGELOG.rst
+++ b/airflow/providers/cncf/kubernetes/CHANGELOG.rst
@@ -27,6 +27,15 @@
 Changelog
 ---------
 
+7.4.0
+.....
+
+.. note::
+  This provider release is the first release that has Kubernetes Executor and
+  Local Kubernetes Executor moved from the core ``apache-airflow`` package to the ``cncf.kubernetes``
+  provider package.
+
+
 7.3.0
 .....
 
@@ -310,10 +319,10 @@ Param ``node_selectors`` has been removed in ``KubernetesPodOperator``; use ``no
 The following backcompat modules for KubernetesPodOperator are removed and you must now use
 the corresponding objects from the kubernetes library:
 
-* ``airflow.providers.cncf.kubernetes.backcompat.pod``
-* ``airflow.providers.cncf.kubernetes.backcompat.pod_runtime_info_env``
-* ``airflow.providers.cncf.kubernetes.backcompat.volume``
-* ``airflow.providers.cncf.kubernetes.backcompat.volume_mount``
+* ``airflow.kubernetes.backcompat.pod``
+* ``airflow.kubernetes.backcompat.pod_runtime_info_env``
+* ``airflow.kubernetes.backcompat.volume``
+* ``airflow.kubernetes.backcompat.volume_mount``
 
 * ``Remove deprecated backcompat objects for KPO (#27518)``
 * ``Remove support for node_selectors param in KPO (#27515)``
diff --git a/tests/kubernetes/__init__.py b/airflow/providers/cncf/kubernetes/executors/__init__.py
similarity index 100%
rename from tests/kubernetes/__init__.py
rename to airflow/providers/cncf/kubernetes/executors/__init__.py
diff --git a/airflow/executors/kubernetes_executor.py b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
similarity index 93%
rename from airflow/executors/kubernetes_executor.py
rename to airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
index d525417a27..e57ca205fe 100644
--- a/airflow/executors/kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor.py
@@ -35,12 +35,12 @@ from typing import TYPE_CHECKING, Any, Sequence
 
 from sqlalchemy.orm import Session
 
+from airflow import AirflowException
 from airflow.configuration import conf
-from airflow.exceptions import PodMutationHookException, PodReconciliationError
 from airflow.executors.base_executor import BaseExecutor
-from airflow.executors.kubernetes_executor_types import POD_EXECUTOR_DONE_KEY
-from airflow.kubernetes.kube_config import KubeConfig
-from airflow.kubernetes.kubernetes_helper_functions import annotations_to_key
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import POD_EXECUTOR_DONE_KEY
+from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import annotations_to_key
 from airflow.utils.event_scheduler import EventScheduler
 from airflow.utils.log.logging_mixin import remove_escape_codes
 from airflow.utils.session import NEW_SESSION, provide_session
@@ -51,13 +51,23 @@ if TYPE_CHECKING:
     from kubernetes.client import models as k8s
 
     from airflow.executors.base_executor import CommandType
-    from airflow.executors.kubernetes_executor_types import (
+    from airflow.models.taskinstance import TaskInstance
+    from airflow.models.taskinstancekey import TaskInstanceKey
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
         KubernetesJobType,
         KubernetesResultsType,
     )
-    from airflow.executors.kubernetes_executor_utils import AirflowKubernetesScheduler
-    from airflow.models.taskinstance import TaskInstance
-    from airflow.models.taskinstancekey import TaskInstanceKey
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils import (
+        AirflowKubernetesScheduler,
+    )
+
+
+class PodMutationHookException(AirflowException):
+    """Raised when exception happens during Pod Mutation Hook execution."""
+
+
+class PodReconciliationError(AirflowException):
+    """Raised when an error is encountered while trying to merge pod configs."""
 
 
 class KubernetesExecutor(BaseExecutor):
@@ -98,11 +108,11 @@ class KubernetesExecutor(BaseExecutor):
     def _make_safe_label_value(self, input_value: str | datetime) -> str:
         """
         Normalize a provided label to be of valid length and characters.
-        See airflow.kubernetes.pod_generator.make_safe_label_value for more details.
+        See airflow.providers.cncf.kubernetes.pod_generator.make_safe_label_value for more details.
         """
-        # airflow.kubernetes is an expensive import, locally import it here to
+        # airflow.providers.cncf.kubernetes is an expensive import, locally import it here to
         # speed up load times of the kubernetes_executor module.
-        from airflow.kubernetes import pod_generator
+        from airflow.providers.cncf.kubernetes import pod_generator
 
         if isinstance(input_value, datetime):
             return pod_generator.datetime_to_label_safe_datestring(input_value)
@@ -188,8 +198,10 @@ class KubernetesExecutor(BaseExecutor):
         self.log.info("Start Kubernetes executor")
         self.scheduler_job_id = str(self.job_id)
         self.log.debug("Start with scheduler_job_id: %s", self.scheduler_job_id)
-        from airflow.executors.kubernetes_executor_utils import AirflowKubernetesScheduler
-        from airflow.kubernetes.kube_client import get_kube_client
+        from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils import (
+            AirflowKubernetesScheduler,
+        )
+        from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
 
         self.kube_client = get_kube_client()
         self.kube_scheduler = AirflowKubernetesScheduler(
@@ -224,7 +236,7 @@ class KubernetesExecutor(BaseExecutor):
         else:
             self.log.info("Add task %s with command %s", key, command)
 
-        from airflow.kubernetes.pod_generator import PodGenerator
+        from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 
         try:
             kube_executor_config = PodGenerator.from_obj(executor_config)
@@ -282,7 +294,7 @@ class KubernetesExecutor(BaseExecutor):
             except Empty:
                 break
 
-        from airflow.executors.kubernetes_executor_utils import ResourceVersion
+        from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils import ResourceVersion
 
         resource_instance = ResourceVersion()
         for ns in resource_instance.resource_version.keys():
@@ -386,8 +398,8 @@ class KubernetesExecutor(BaseExecutor):
         log = []
         try:
 
-            from airflow.kubernetes.kube_client import get_kube_client
-            from airflow.kubernetes.pod_generator import PodGenerator
+            from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
+            from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 
             client = get_kube_client()
 
@@ -463,7 +475,7 @@ class KubernetesExecutor(BaseExecutor):
         :param tis: List of Task Instances to clean up
         :return: List of readable task instances for a warning message
         """
-        from airflow.kubernetes.pod_generator import PodGenerator
+        from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 
         if TYPE_CHECKING:
             assert self.kube_client
diff --git a/airflow/executors/kubernetes_executor_types.py b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py
similarity index 100%
rename from airflow/executors/kubernetes_executor_types.py
rename to airflow/providers/cncf/kubernetes/executors/kubernetes_executor_types.py
diff --git a/airflow/executors/kubernetes_executor_utils.py b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
similarity index 93%
rename from airflow/executors/kubernetes_executor_utils.py
rename to airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
index b1ee49cb69..c1ee9d1ebe 100644
--- a/airflow/executors/kubernetes_executor_utils.py
+++ b/airflow/providers/cncf/kubernetes/executors/kubernetes_executor_utils.py
@@ -20,7 +20,7 @@ import json
 import multiprocessing
 import time
 from queue import Empty, Queue
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Generic, TypeVar
 
 from kubernetes import client, watch
 from kubernetes.client import Configuration, models as k8s
@@ -28,26 +28,50 @@ from kubernetes.client.rest import ApiException
 from urllib3.exceptions import ReadTimeoutError
 
 from airflow.exceptions import AirflowException
-from airflow.kubernetes.kube_client import get_kube_client
-from airflow.kubernetes.kubernetes_helper_functions import (
+from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
     annotations_for_logging_task_metadata,
     annotations_to_key,
     create_pod_id,
 )
-from airflow.kubernetes.pod_generator import PodGenerator
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 from airflow.utils.log.logging_mixin import LoggingMixin
-from airflow.utils.singleton import Singleton
 from airflow.utils.state import State
 
+try:
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
+        ALL_NAMESPACES,
+        POD_EXECUTOR_DONE_KEY,
+    )
+except ImportError:
+    # avoid failing import when Airflow pre 2.7 is installed
+    from airflow.kubernetes.kubernetes_executor_types import (  # type: ignore[no-redef]
+        ALL_NAMESPACES,
+        POD_EXECUTOR_DONE_KEY,
+    )
+
 if TYPE_CHECKING:
-    from airflow.executors.kubernetes_executor_types import (
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import (
         KubernetesJobType,
         KubernetesResultsType,
         KubernetesWatchType,
     )
 
+# Singleton here is a duplicated version of airflow.utils.singleton.Singleton, kept until the minimum
+# Airflow version for the provider is 2.7.0. Then it can be imported from airflow.utils.singleton instead.
+
+T = TypeVar("T")
+
+
+class Singleton(type, Generic[T]):
+    """Metaclass that allows to implement singleton pattern."""
+
+    _instances: dict[Singleton[T], T] = {}
 
-from airflow.executors.kubernetes_executor_types import ALL_NAMESPACES, POD_EXECUTOR_DONE_KEY
+    def __call__(cls: Singleton[T], *args, **kwargs) -> T:
+        if cls not in cls._instances:
+            cls._instances[cls] = super().__call__(*args, **kwargs)
+        return cls._instances[cls]
 
 
 class ResourceVersion(metaclass=Singleton):
diff --git a/airflow/executors/local_kubernetes_executor.py b/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
similarity index 99%
rename from airflow/executors/local_kubernetes_executor.py
rename to airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
index cc95232c91..ee82252096 100644
--- a/airflow/executors/local_kubernetes_executor.py
+++ b/airflow/providers/cncf/kubernetes/executors/local_kubernetes_executor.py
@@ -22,8 +22,8 @@ from typing import TYPE_CHECKING, Sequence
 from airflow.callbacks.base_callback_sink import BaseCallbackSink
 from airflow.callbacks.callback_requests import CallbackRequest
 from airflow.configuration import conf
-from airflow.executors.kubernetes_executor import KubernetesExecutor
 from airflow.executors.local_executor import LocalExecutor
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
 from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:
diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
index f237461155..56852fb1a2 100644
--- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
+++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py
@@ -30,8 +30,8 @@ from urllib3.exceptions import HTTPError
 
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.hooks.base import BaseHook
-from airflow.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
 from airflow.models import Connection
+from airflow.providers.cncf.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive
 from airflow.providers.cncf.kubernetes.utils.pod_manager import PodOperatorHookProtocol
 from airflow.utils import yaml
 
diff --git a/airflow/kubernetes/k8s_model.py b/airflow/providers/cncf/kubernetes/k8s_model.py
similarity index 100%
rename from airflow/kubernetes/k8s_model.py
rename to airflow/providers/cncf/kubernetes/k8s_model.py
diff --git a/airflow/kubernetes/kube_client.py b/airflow/providers/cncf/kubernetes/kube_client.py
similarity index 96%
rename from airflow/kubernetes/kube_client.py
rename to airflow/providers/cncf/kubernetes/kube_client.py
index d2e791dbfd..b9dec69402 100644
--- a/airflow/kubernetes/kube_client.py
+++ b/airflow/providers/cncf/kubernetes/kube_client.py
@@ -58,6 +58,7 @@ def _enable_tcp_keepalive() -> None:
     is time-outed on services like cloud load balancers or firewalls.
 
     See https://github.com/apache/airflow/pull/11406 for detailed explanation.
+
     Please ping @michalmisiewicz or @dimberman in the PR if you want to modify this function.
     """
     import socket
@@ -90,7 +91,7 @@ def _enable_tcp_keepalive() -> None:
 
 
 def get_kube_client(
-    in_cluster: bool = conf.getboolean("kubernetes_executor", "in_cluster"),
+    in_cluster: bool | None = None,
     cluster_context: str | None = None,
     config_file: str | None = None,
 ) -> client.CoreV1Api:
@@ -100,9 +101,10 @@ def get_kube_client(
     :param in_cluster: whether we are in cluster
     :param cluster_context: context of the cluster
     :param config_file: configuration file
-    :return kubernetes client
-    :rtype client.CoreV1Api
+    :return: kubernetes client
     """
+    if in_cluster is None:
+        in_cluster = conf.getboolean("kubernetes_executor", "in_cluster")
     if not has_kubernetes:
         raise _import_err
 
diff --git a/airflow/kubernetes/kube_config.py b/airflow/providers/cncf/kubernetes/kube_config.py
similarity index 99%
rename from airflow/kubernetes/kube_config.py
rename to airflow/providers/cncf/kubernetes/kube_config.py
index 20bddf82f3..8db861a7f5 100644
--- a/airflow/kubernetes/kube_config.py
+++ b/airflow/providers/cncf/kubernetes/kube_config.py
@@ -43,7 +43,6 @@ class KubeConfig:
         self.worker_pods_creation_batch_size = conf.getint(
             self.kubernetes_section, "worker_pods_creation_batch_size"
         )
-
         self.worker_container_repository = conf.get(self.kubernetes_section, "worker_container_repository")
         self.worker_container_tag = conf.get(self.kubernetes_section, "worker_container_tag")
         if self.worker_container_repository and self.worker_container_tag:
diff --git a/tests/executors/kubernetes_executor_template_files/__init__.py b/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py
similarity index 100%
rename from tests/executors/kubernetes_executor_template_files/__init__.py
rename to airflow/providers/cncf/kubernetes/kubernetes_executor_templates/__init__.py
diff --git a/airflow/kubernetes_executor_templates/basic_template.yaml b/airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml
similarity index 100%
rename from airflow/kubernetes_executor_templates/basic_template.yaml
rename to airflow/providers/cncf/kubernetes/kubernetes_executor_templates/basic_template.yaml
diff --git a/airflow/kubernetes/kubernetes_helper_functions.py b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
similarity index 94%
rename from airflow/kubernetes/kubernetes_helper_functions.py
rename to airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
index 27762421ca..b54519fe27 100644
--- a/airflow/kubernetes/kubernetes_helper_functions.py
+++ b/airflow/providers/cncf/kubernetes/kubernetes_helper_functions.py
@@ -43,8 +43,14 @@ def rand_str(num):
     return "".join(secrets.choice(alphanum_lower) for _ in range(num))
 
 
-def add_pod_suffix(*, pod_name, rand_len=8, max_len=80):
-    """Add random string to pod name while staying under max len."""
+def add_pod_suffix(*, pod_name: str, rand_len: int = 8, max_len: int = 80) -> str:
+    """Add random string to pod name while staying under max length.
+
+    :param pod_name: name of the pod
+    :param rand_len: length of the random string to append
+    :param max_len: maximum length of the pod name
+    :meta private:
+    """
     suffix = "-" + rand_str(rand_len)
     return pod_name[: max_len - len(suffix)].strip("-.") + suffix
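
A quick illustration of the documented behaviour (the random suffix shown is made up):

    add_pod_suffix(pod_name="my-task-pod", rand_len=8, max_len=80)
    # -> e.g. "my-task-pod-a1b2c3d4"; the base name is trimmed first so the result stays within max_len
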
 
diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py b/airflow/providers/cncf/kubernetes/operators/pod.py
index 49940144b5..6e6acbec4e 100644
--- a/airflow/providers/cncf/kubernetes/operators/pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/pod.py
@@ -35,10 +35,8 @@ from urllib3.exceptions import HTTPError
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
-from airflow.kubernetes import pod_generator
-from airflow.kubernetes.pod_generator import PodGenerator
-from airflow.kubernetes.secret import Secret
 from airflow.models import BaseOperator
+from airflow.providers.cncf.kubernetes import pod_generator
 from airflow.providers.cncf.kubernetes.backcompat.backwards_compat_converters import (
     convert_affinity,
     convert_configmap,
@@ -51,6 +49,8 @@ from airflow.providers.cncf.kubernetes.backcompat.backwards_compat_converters im
     convert_volume_mount,
 )
 from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+from airflow.providers.cncf.kubernetes.secret import Secret
 from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
 from airflow.providers.cncf.kubernetes.utils import xcom_sidecar  # type: ignore[attr-defined]
 from airflow.providers.cncf.kubernetes.utils.pod_manager import (
diff --git a/airflow/kubernetes/pod_generator.py b/airflow/providers/cncf/kubernetes/pod_generator.py
similarity index 97%
rename from airflow/kubernetes/pod_generator.py
rename to airflow/providers/cncf/kubernetes/pod_generator.py
index 844a1abfd8..c0f2e7e89f 100644
--- a/airflow/kubernetes/pod_generator.py
+++ b/airflow/providers/cncf/kubernetes/pod_generator.py
@@ -38,14 +38,21 @@ from kubernetes.client.api_client import ApiClient
 
 from airflow.exceptions import (
     AirflowConfigException,
+    RemovedInAirflow3Warning,
+)
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import (
     PodMutationHookException,
     PodReconciliationError,
-    RemovedInAirflow3Warning,
 )
-from airflow.kubernetes.kubernetes_helper_functions import add_pod_suffix, rand_str
-from airflow.kubernetes.pod_generator_deprecated import PodDefaults, PodGenerator as PodGeneratorDeprecated
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import add_pod_suffix, rand_str
+from airflow.providers.cncf.kubernetes.pod_generator_deprecated import (
+    PodDefaults,
+    PodGenerator as PodGeneratorDeprecated,
+)
+
+# Replace this with airflow.utils.hashlib_wrapper.md5 when the minimum Airflow version for the k8s provider is 2.6.0
+from airflow.providers.cncf.kubernetes.utils.k8s_hashlib_wrapper import md5
 from airflow.utils import yaml
-from airflow.utils.hashlib_wrapper import md5
 from airflow.version import version as airflow_version
 
 log = logging.getLogger(__name__)
@@ -626,7 +633,10 @@ def extend_object_field(base_obj, client_obj, field_name):
     if (not isinstance(base_obj_field, list) and base_obj_field is not None) or (
         not isinstance(client_obj_field, list) and client_obj_field is not None
     ):
-        raise ValueError("The chosen field must be a list.")
+        raise ValueError(
+            f"The chosen field must be a list. Got {type(base_obj_field)} base_object_field "
+            f"and {type(client_obj_field)} client_object_field."
+        )
 
     if not base_obj_field:
         return client_obj_cp
diff --git a/airflow/kubernetes/pod_generator_deprecated.py b/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py
similarity index 97%
rename from airflow/kubernetes/pod_generator_deprecated.py
rename to airflow/providers/cncf/kubernetes/pod_generator_deprecated.py
index 8876556a8d..8d64e96d6c 100644
--- a/airflow/kubernetes/pod_generator_deprecated.py
+++ b/airflow/providers/cncf/kubernetes/pod_generator_deprecated.py
@@ -30,7 +30,8 @@ import uuid
 import re2
 from kubernetes.client import models as k8s
 
-from airflow.utils.hashlib_wrapper import md5
+# Replace this with airflow.utils.hashlib_wrapper.md5 when the minimum Airflow version for the k8s provider is 2.6.0
+from airflow.providers.cncf.kubernetes.utils.k8s_hashlib_wrapper import md5
 
 MAX_POD_ID_LEN = 253
 
@@ -111,7 +112,7 @@ class PodGenerator:
     :param hostnetwork: If True enable host networking on the pod
     :param tolerations: A list of kubernetes tolerations
     :param security_context: A dict containing the security context for the pod
-    :param configmaps: Any configmap refs to envfrom.
+    :param configmaps: Any configmap refs to load environment variables from (``envFrom``).
         If more than one configmap is required, provide a comma separated list
         configmap_a,configmap_b
     :param dnspolicy: Specify a dnspolicy for the pod
diff --git a/airflow/kubernetes/pod_launcher_deprecated.py b/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py
similarity index 98%
rename from airflow/kubernetes/pod_launcher_deprecated.py
rename to airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py
index fa5c884a51..9fc964cdb6 100644
--- a/airflow/kubernetes/pod_launcher_deprecated.py
+++ b/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py
@@ -32,8 +32,8 @@ from kubernetes.stream import stream as kubernetes_stream
 from requests.exceptions import HTTPError
 
 from airflow.exceptions import AirflowException, RemovedInAirflow3Warning
-from airflow.kubernetes.kube_client import get_kube_client
-from airflow.kubernetes.pod_generator import PodDefaults
+from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
+from airflow.providers.cncf.kubernetes.pod_generator import PodDefaults
 from airflow.settings import pod_mutation_hook
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.state import State
diff --git a/airflow/kubernetes/__init__.py b/airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py
similarity index 100%
copy from airflow/kubernetes/__init__.py
copy to airflow/providers/cncf/kubernetes/pod_template_file_examples/__init__.py
diff --git a/airflow/kubernetes/pod_template_file_examples/dags_in_image_template.yaml b/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml
similarity index 100%
rename from airflow/kubernetes/pod_template_file_examples/dags_in_image_template.yaml
rename to airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml
diff --git a/airflow/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml b/airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml
similarity index 100%
rename from airflow/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml
rename to airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml
diff --git a/airflow/kubernetes/pod_template_file_examples/git_sync_template.yaml b/airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml
similarity index 100%
rename from airflow/kubernetes/pod_template_file_examples/git_sync_template.yaml
rename to airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml
diff --git a/airflow/providers/cncf/kubernetes/provider.yaml b/airflow/providers/cncf/kubernetes/provider.yaml
index 86ab5f7016..d9b2966bba 100644
--- a/airflow/providers/cncf/kubernetes/provider.yaml
+++ b/airflow/providers/cncf/kubernetes/provider.yaml
@@ -23,6 +23,7 @@ description: |
 
 suspended: false
 versions:
+  - 7.4.0
   - 7.3.0
   - 7.2.0
   - 7.1.0
@@ -124,3 +125,212 @@ connection-types:
 task-decorators:
   - class-name: airflow.providers.cncf.kubernetes.decorators.kubernetes.kubernetes_task
     name: kubernetes
+
+config:
+  local_kubernetes_executor:
+    description: |
+      This section only applies if you are using the ``LocalKubernetesExecutor`` in the
+      ``[core]`` section above
+    options:
+      kubernetes_queue:
+        description: |
+          Define when to send a task to ``KubernetesExecutor`` when using ``LocalKubernetesExecutor``.
+          When the queue of a task is the value of ``kubernetes_queue`` (default ``kubernetes``),
+          the task is executed via ``KubernetesExecutor``,
+          otherwise via ``LocalExecutor``
+        version_added: ~
+        type: string
+        example: ~
+        default: "kubernetes"
+  kubernetes_executor:
+    description: ~
+    options:
+      api_client_retry_configuration:
+        description: |
+          Kwargs to override the default urllib3 Retry used in the kubernetes API client
+        version_added: ~
+        type: string
+        example: '{ "total": 3, "backoff_factor": 0.5 }'
+        default: ""
+      logs_task_metadata:
+        description: |
+          Flag to control the information added to kubernetes executor logs for better traceability
+        version_added: ~
+        type: boolean
+        example: ~
+        default: "False"
+      pod_template_file:
+        description: |
+          Path to the YAML pod file that forms the basis for KubernetesExecutor workers.
+        version_added: ~
+        type: string
+        example: ~
+        default: ""
+        see_also: ":ref:`concepts:pod_template_file`"
+      worker_container_repository:
+        description: |
+          The repository of the Kubernetes Image for the Worker to Run
+        version_added: ~
+        type: string
+        example: ~
+        default: ""
+      worker_container_tag:
+        description: |
+          The tag of the Kubernetes Image for the Worker to Run
+        version_added: ~
+        type: string
+        example: ~
+        default: ""
+      namespace:
+        description: |
+          The Kubernetes namespace where airflow workers should be created. Defaults to ``default``
+        version_added: ~
+        type: string
+        example: ~
+        default: "default"
+      delete_worker_pods:
+        description: |
+          If True, all worker pods will be deleted upon termination
+        version_added: ~
+        type: string
+        example: ~
+        default: "True"
+      delete_worker_pods_on_failure:
+        description: |
+          If False (and delete_worker_pods is True),
+          failed worker pods will not be deleted so users can investigate them.
+          This only prevents removal of worker pods where the worker itself failed,
+          not when the task it ran failed.
+        version_added: ~
+        type: string
+        example: ~
+        default: "False"
+      worker_pods_creation_batch_size:
+        description: |
+          Number of Kubernetes Worker Pod creation calls per scheduler loop.
+          Note that the current default of "1" will only launch a single pod
+          per-heartbeat. It is HIGHLY recommended that users increase this
+          number to match the tolerance of their kubernetes cluster for
+          better performance.
+        version_added: ~
+        type: string
+        example: ~
+        default: "1"
+      multi_namespace_mode:
+        description: |
+          Allows users to launch pods in multiple namespaces.
+          This requires creating a cluster-role for the scheduler,
+          or using the multi_namespace_mode_namespace_list configuration.
+        version_added: ~
+        type: boolean
+        example: ~
+        default: "False"
+      multi_namespace_mode_namespace_list:
+        description: |
+          If multi_namespace_mode is True while the scheduler does not have a cluster-role,
+          give the list of namespaces where the scheduler will schedule jobs.
+          The scheduler needs to have the necessary permissions in these namespaces.
+        version_added: ~
+        type: string
+        example: ~
+        default: ""
+      in_cluster:
+        description: |
+          Use the service account kubernetes gives to pods to connect to kubernetes cluster.
+          It's intended for clients that expect to be running inside a pod running on kubernetes.
+          It will raise an exception if called from a process not running in a kubernetes environment.
+        version_added: ~
+        type: string
+        example: ~
+        default: "True"
+      cluster_context:
+        description: |
+          When running with in_cluster=False, change the default cluster_context or config_file
+          options passed to the Kubernetes client. Leave these blank to use the default behaviour, as ``kubectl`` does.
+        version_added: ~
+        type: string
+        example: ~
+        default: ~
+      config_file:
+        description: |
+          Path to the kubernetes config file to be used when ``in_cluster`` is set to False
+        version_added: ~
+        type: string
+        example: ~
+        default: ~
+      kube_client_request_args:
+        description: |
+          Keyword parameters to pass while calling kubernetes client core_v1_api methods
+          from Kubernetes Executor provided as a single line formatted JSON dictionary string.
+          List of supported params are similar for all core_v1_apis, hence a single config
+          variable for all apis. See:
+          https://raw.githubusercontent.com/kubernetes-client/python/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/api/core_v1_api.py
+        version_added: ~
+        type: string
+        example: ~
+        default: ""
+      delete_option_kwargs:
+        description: |
+          Optional keyword arguments to pass to the ``delete_namespaced_pod`` kubernetes client
+          ``core_v1_api`` method when using the Kubernetes Executor.
+          This should be an object and can contain any of the options listed in the ``v1DeleteOptions``
+          class defined here:
+          https://github.com/kubernetes-client/python/blob/41f11a09995efcd0142e25946adc7591431bfb2f/kubernetes/client/models/v1_delete_options.py#L19
+        version_added: ~
+        type: string
+        example: '{"grace_period_seconds": 10}'
+        default: ""
+      enable_tcp_keepalive:
+        description: |
+          Enables the TCP keepalive mechanism. This prevents Kubernetes API requests from hanging indefinitely
+          when an idle connection times out on services like cloud load balancers or firewalls.
+        version_added: ~
+        type: boolean
+        example: ~
+        default: "True"
+      tcp_keep_idle:
+        description: |
+          When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has
+          been idle for `tcp_keep_idle` seconds.
+        version_added: ~
+        type: integer
+        example: ~
+        default: "120"
+      tcp_keep_intvl:
+        description: |
+          When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond
+          to a keepalive probe, TCP retransmits the probe after `tcp_keep_intvl` seconds.
+        version_added: ~
+        type: integer
+        example: ~
+        default: "30"
+      tcp_keep_cnt:
+        description: |
+          When the `enable_tcp_keepalive` option is enabled, if Kubernetes API does not respond
+          to a keepalive probe, TCP retransmits the probe `tcp_keep_cnt` times before
+          a connection is considered to be broken.
+        version_added: ~
+        type: integer
+        example: ~
+        default: "6"
+      verify_ssl:
+        description: |
+          Set this to false to skip verifying the SSL certificate of the Kubernetes python client.
+        version_added: ~
+        type: boolean
+        example: ~
+        default: "True"
+      worker_pods_queued_check_interval:
+        description: |
+          How often in seconds to check for task instances stuck in "queued" status without a pod
+        version_added: ~
+        type: integer
+        example: ~
+        default: "60"
+      ssl_ca_cert:
+        description: |
+          Path to a CA certificate to be used by the Kubernetes client to verify the server's SSL certificate.
+        version_added: ~
+        type: string
+        example: ~
+        default: ""
diff --git a/airflow/kubernetes/secret.py b/airflow/providers/cncf/kubernetes/secret.py
similarity index 98%
rename from airflow/kubernetes/secret.py
rename to airflow/providers/cncf/kubernetes/secret.py
index 32ce92e2de..d4fba36f16 100644
--- a/airflow/kubernetes/secret.py
+++ b/airflow/providers/cncf/kubernetes/secret.py
@@ -23,7 +23,7 @@ import uuid
 from kubernetes.client import models as k8s
 
 from airflow.exceptions import AirflowConfigException
-from airflow.kubernetes.k8s_model import K8SModel
+from airflow.providers.cncf.kubernetes.k8s_model import K8SModel
 
 
 class Secret(K8SModel):
diff --git a/airflow/providers/cncf/kubernetes/template_rendering.py b/airflow/providers/cncf/kubernetes/template_rendering.py
new file mode 100644
index 0000000000..5461284e46
--- /dev/null
+++ b/airflow/providers/cncf/kubernetes/template_rendering.py
@@ -0,0 +1,67 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+from jinja2 import TemplateAssertionError, UndefinedError
+from kubernetes.client.api_client import ApiClient
+
+from airflow import AirflowException
+from airflow.models.taskinstance import TaskInstance
+from airflow.providers.cncf.kubernetes.kube_config import KubeConfig
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
+    create_pod_id,
+)
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+from airflow.utils.session import NEW_SESSION, provide_session
+
+
+def render_k8s_pod_yaml(task_instance: TaskInstance) -> dict | None:
+    """Render k8s pod yaml."""
+    kube_config = KubeConfig()
+    pod = PodGenerator.construct_pod(
+        dag_id=task_instance.dag_id,
+        run_id=task_instance.run_id,
+        task_id=task_instance.task_id,
+        map_index=task_instance.map_index,
+        date=None,
+        pod_id=create_pod_id(task_instance.dag_id, task_instance.task_id),
+        try_number=task_instance.try_number,
+        kube_image=kube_config.kube_image,
+        args=task_instance.command_as_list(),
+        pod_override_object=PodGenerator.from_obj(task_instance.executor_config),
+        scheduler_job_id="0",
+        namespace=kube_config.executor_namespace,
+        base_worker_pod=PodGenerator.deserialize_model_file(kube_config.pod_template_file),
+        with_mutation_hook=True,
+    )
+    sanitized_pod = ApiClient().sanitize_for_serialization(pod)
+    return sanitized_pod
+
+
+@provide_session
+def get_rendered_k8s_spec(task_instance: TaskInstance, session=NEW_SESSION) -> dict | None:
+    """Fetch rendered template fields from DB."""
+    from airflow.models.renderedtifields import RenderedTaskInstanceFields
+
+    rendered_k8s_spec = RenderedTaskInstanceFields.get_k8s_pod_yaml(task_instance, session=session)
+    if not rendered_k8s_spec:
+        try:
+            rendered_k8s_spec = render_k8s_pod_yaml(task_instance)
+        except (TemplateAssertionError, UndefinedError) as e:
+            raise AirflowException(f"Unable to render a k8s spec for this taskinstance: {e}") from e
+    return rendered_k8s_spec
diff --git a/airflow/kubernetes/pod.py b/airflow/providers/cncf/kubernetes/utils/k8s_hashlib_wrapper.py
similarity index 53%
rename from airflow/kubernetes/pod.py
rename to airflow/providers/cncf/kubernetes/utils/k8s_hashlib_wrapper.py
index 629cbad17c..72a1f8dac6 100644
--- a/airflow/kubernetes/pod.py
+++ b/airflow/providers/cncf/kubernetes/utils/k8s_hashlib_wrapper.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,24 +14,29 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated.
-Please use :mod:`kubernetes.client.models` for `V1ResourceRequirements` and `Port`.
-"""
+
+# This is a backcompat hashlib wrapper for the kubernetes provider. It should be removed (together with the
+# corresponding imports in the provider) when the minimum Airflow version for the provider is 2.6.0.
+
 from __future__ import annotations
 
-import warnings
+import hashlib
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from _typeshed import ReadableBuffer
 
-from airflow.exceptions import RemovedInAirflow3Warning
+from airflow import PY39
 
-# flake8: noqa
 
-with warnings.catch_warnings():
-    warnings.simplefilter("ignore", RemovedInAirflow3Warning)
-    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources
+def md5(__string: ReadableBuffer = b"") -> hashlib._Hash:
+    """
+    Safely allows calling the ``hashlib.md5`` function when ``usedforsecurity`` is disabled in
+    the configuration.
 
-warnings.warn(
-    "This module is deprecated. Please use `kubernetes.client.models` for `V1ResourceRequirements` and `Port`.",
-    RemovedInAirflow3Warning,
-    stacklevel=2,
-)
+    :param __string: The data to hash. Defaults to an empty byte string.
+    :return: The hashed value.
+    """
+    if PY39:
+        return hashlib.md5(__string, usedforsecurity=False)  # type: ignore
+    return hashlib.md5(__string)
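
For context, passing ``usedforsecurity=False`` (Python 3.9+) allows the call to succeed on FIPS-restricted builds
where plain ``hashlib.md5`` may be rejected; the wrapper is only meant for non-cryptographic hashing. A rough usage
sketch (the input value and truncation are made up for illustration):

    from airflow.providers.cncf.kubernetes.utils.k8s_hashlib_wrapper import md5

    digest = md5(b"my-pod-name").hexdigest()[:8]  # stable, non-security-relevant identifier
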
diff --git a/airflow/providers/cncf/kubernetes/utils/pod_manager.py b/airflow/providers/cncf/kubernetes/utils/pod_manager.py
index acd40d4b17..c8ac74382d 100644
--- a/airflow/providers/cncf/kubernetes/utils/pod_manager.py
+++ b/airflow/providers/cncf/kubernetes/utils/pod_manager.py
@@ -43,7 +43,7 @@ from urllib3.exceptions import HTTPError as BaseHTTPError
 from urllib3.response import HTTPResponse
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
-from airflow.kubernetes.pod_generator import PodDefaults
+from airflow.providers.cncf.kubernetes.pod_generator import PodDefaults
 from airflow.typing_compat import Literal, Protocol
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.timezone import utcnow
@@ -89,7 +89,7 @@ class PodOperatorHookProtocol(Protocol):
 
     @property
     def core_v1_client(self) -> client.CoreV1Api:
-        """Get authenticated CoreV1Api object."""
+        """Get authenticated client object."""
 
     @property
     def is_in_cluster(self) -> bool:
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index 60e2d4b02d..5efa3b3da5 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -73,7 +73,7 @@ if TYPE_CHECKING:
     try:
         from kubernetes.client import models as k8s
 
-        from airflow.kubernetes.pod_generator import PodGenerator
+        from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
     except ImportError:
         pass
 
@@ -1482,7 +1482,12 @@ def _has_kubernetes() -> bool:
     try:
         from kubernetes.client import models as k8s
 
-        from airflow.kubernetes.pod_generator import PodGenerator
+        try:
+            from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+        except ImportError:
+            from airflow.kubernetes.pre_7_4_0_compatibility.pod_generator import (  # type: ignore[assignment]
+                PodGenerator,
+            )
 
         globals()["k8s"] = k8s
         globals()["PodGenerator"] = PodGenerator
diff --git a/airflow/serialization/serializers/kubernetes.py b/airflow/serialization/serializers/kubernetes.py
index d38836affc..f095400ee9 100644
--- a/airflow/serialization/serializers/kubernetes.py
+++ b/airflow/serialization/serializers/kubernetes.py
@@ -44,7 +44,12 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
         return "", "", 0, False
 
     if isinstance(o, (k8s.V1Pod, k8s.V1ResourceRequirements)):
-        from airflow.kubernetes.pod_generator import PodGenerator
+        try:
+            from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+        except ImportError:
+            from airflow.kubernetes.pre_7_4_0_compatibility.pod_generator import (  # type: ignore[assignment]
+                PodGenerator,
+            )
 
         # We're running this in an except block, so we don't want it to fail
         # under any circumstances, e.g. accessing a non-existing attribute.
diff --git a/airflow/timetables/_cron.py b/airflow/timetables/_cron.py
index 49f5771966..89cae4bdcb 100644
--- a/airflow/timetables/_cron.py
+++ b/airflow/timetables/_cron.py
@@ -65,10 +65,10 @@ class CronMixin:
             # as Croniter has inconsistent evaluation with other libraries
             if len(croniter(self._expression).expanded) > 5:
                 raise FormatException()
-            interval_description = descriptor.get_description()
+            interval_description: str = descriptor.get_description()
         except (CroniterBadCronError, FormatException, MissingFieldException):
             interval_description = ""
-        self.description = interval_description
+        self.description: str = interval_description
 
     def __eq__(self, other: Any) -> bool:
         """Both expression and timezone should match.
diff --git a/airflow/utils/deprecation_tools.py b/airflow/utils/deprecation_tools.py
index a189fe5479..6be6650f09 100644
--- a/airflow/utils/deprecation_tools.py
+++ b/airflow/utils/deprecation_tools.py
@@ -23,25 +23,75 @@ import warnings
 from types import ModuleType
 
 
-def getattr_with_deprecation(imports: dict[str, str], module: str, name: str):
+def getattr_with_deprecation(
+    imports: dict[str, str],
+    module: str,
+    override_deprecated_classes: dict[str, str],
+    extra_message: str,
+    name: str,
+):
+    """
+    Retrieve the imported attribute from the redirected module and raise a deprecation warning.
+
+    :param imports: dict of imports and their redirection for the module
+    :param module: name of the module in the package to get the attribute from
+    :param override_deprecated_classes: override target classes with deprecated ones. If the target class is
+       found in the dictionary, it will be displayed in the warning message.
+    :param extra_message: extra message to display in the warning or import error message
+    :param name: attribute name
+    :return: the attribute imported from the redirected module
+    """
     target_class_full_name = imports.get(name)
     if not target_class_full_name:
         raise AttributeError(f"The module `{module!r}` has no attribute `{name!r}`")
-    warnings.warn(
-        f"The `{module}.{name}` class is deprecated. Please use `{target_class_full_name!r}`.",
-        DeprecationWarning,
-        stacklevel=2,
-    )
+    warning_class_name = target_class_full_name
+    if override_deprecated_classes and name in override_deprecated_classes:
+        warning_class_name = override_deprecated_classes[name]
+    message = f"The `{module}.{name}` class is deprecated. Please use `{warning_class_name!r}`."
+    if extra_message:
+        message += f" {extra_message}."
+    warnings.warn(message, DeprecationWarning, stacklevel=2)
     new_module, new_class_name = target_class_full_name.rsplit(".", 1)
-    return getattr(importlib.import_module(new_module), new_class_name)
+    try:
+        return getattr(importlib.import_module(new_module), new_class_name)
+    except ImportError as e:
+        error_message = (
+            f"Could not import `{new_module}.{new_class_name}` while trying to import `{module}.{name}`."
+        )
+        if extra_message:
+            error_message += f" {extra_message}."
+        raise ImportError(error_message) from e
 
 
-def add_deprecated_classes(module_imports: dict[str, dict[str, str]], package: str):
+def add_deprecated_classes(
+    module_imports: dict[str, dict[str, str]],
+    package: str,
+    override_deprecated_classes: dict[str, dict[str, str]] | None = None,
+    extra_message: str | None = None,
+):
+    """
+    Add deprecated class PEP-563 imports and warnings modules to the package.
+
+    :param module_imports: imports to use
+    :param package: package name
+    :param override_deprecated_classes: override target classes with deprecated ones. If module +
+       target class is found in the dictionary, it will be displayed in the warning message.
+    :param extra_message: extra message to display in the warning or import error message
+    """
     for module_name, imports in module_imports.items():
         full_module_name = f"{package}.{module_name}"
         module_type = ModuleType(full_module_name)
+        if override_deprecated_classes and module_name in override_deprecated_classes:
+            override_deprecated_classes_for_module = override_deprecated_classes[module_name]
+        else:
+            override_deprecated_classes_for_module = {}
+
         # Mypy is not able to derive the right function signature https://github.com/python/mypy/issues/2427
         module_type.__getattr__ = functools.partial(  # type: ignore[assignment]
-            getattr_with_deprecation, imports, full_module_name
+            getattr_with_deprecation,
+            imports,
+            full_module_name,
+            override_deprecated_classes_for_module,
+            extra_message if extra_message else "",
         )
         sys.modules.setdefault(full_module_name, module_type)
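
The new parameters are what let the old ``airflow.kubernetes`` shim modules point users at the provider classes and
include an install hint. A rough sketch of how a redirecting package might wire this up (the exact mapping and
message here are assumptions, not taken from this diff):

    from airflow.utils.deprecation_tools import add_deprecated_classes

    __deprecated_classes = {
        "pod_generator": {
            "PodGenerator": "airflow.providers.cncf.kubernetes.pod_generator.PodGenerator",
        },
    }

    add_deprecated_classes(
        __deprecated_classes,
        __name__,
        extra_message="Install apache-airflow-providers-cncf-kubernetes>=7.4.0 to use the new classes",
    )
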
diff --git a/airflow/config_templates/__init__.py b/airflow/utils/empty_set.py
similarity index 68%
copy from airflow/config_templates/__init__.py
copy to airflow/utils/empty_set.py
index 6dd06760cd..0871c1ec5a 100644
--- a/airflow/config_templates/__init__.py
+++ b/airflow/utils/empty_set.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,14 +14,18 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from __future__ import annotations
 
-from airflow.utils.deprecation_tools import add_deprecated_classes
 
-__deprecated_classes = {
-    "default_celery": {
-        "DEFAULT_CELERY_CONFIG": "airflow.providers.celery.executors.default_celery.DEFAULT_CELERY_CONFIG",
-    },
-}
+def _get_empty_set_for_configuration() -> set[tuple[str, str]]:
+    """
+    Retrieve an empty set for configuration.
+
+    This method is only needed because the configuration module has a deprecated method called ``set``, and it
+    confuses mypy. This method will be removed when we remove the deprecated method.
 
-add_deprecated_classes(__deprecated_classes, __name__)
+    :meta private:
+    :return: empty set
+    """
+    return set()
diff --git a/airflow/utils/hashlib_wrapper.py b/airflow/utils/hashlib_wrapper.py
index 09850c565c..65a18566b7 100644
--- a/airflow/utils/hashlib_wrapper.py
+++ b/airflow/utils/hashlib_wrapper.py
@@ -28,9 +28,10 @@ from airflow import PY39
 
 def md5(__string: ReadableBuffer = b"") -> hashlib._Hash:
     """
-    Safely allows calling the hashlib.md5 function when "usedforsecurity" is disabled in the configuration.
+    Safely allows calling the ``hashlib.md5`` function when ``usedforsecurity`` is disabled in
+    the configuration.
 
-    :param string: The data to hash. Default to empty str byte.
+    :param __string: The data to hash. Defaults to an empty byte string.
     :return: The hashed value.
     """
     if PY39:
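A short usage sketch (the hashed value is a placeholder): the wrapper is meant as a drop-in replacement for ``hashlib.md5`` in non-security code paths, presumably passing ``usedforsecurity=False`` on Python 3.9+ where that flag exists:

    from airflow.utils.hashlib_wrapper import md5

    # non-security use: derive a short, stable suffix from an arbitrary string
    suffix = md5("my-pod-name".encode()).hexdigest()[:8]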
diff --git a/airflow/utils/sqlalchemy.py b/airflow/utils/sqlalchemy.py
index 499aa31c34..5690799ce4 100644
--- a/airflow/utils/sqlalchemy.py
+++ b/airflow/utils/sqlalchemy.py
@@ -234,8 +234,12 @@ def ensure_pod_is_valid_after_unpickling(pod: V1Pod) -> V1Pod | None:
     if not isinstance(pod, V1Pod):
         return None
     try:
-        from airflow.kubernetes.pod_generator import PodGenerator
-
+        try:
+            from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+        except ImportError:
+            from airflow.kubernetes.pre_7_4_0_compatibility.pod_generator import (  # type: ignore[assignment]
+                PodGenerator,
+            )
         # now we actually reserialize / deserialize the pod
         pod_dict = sanitize_for_serialization(pod)
         return PodGenerator.deserialize_model_dict(pod_dict)
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 54f46575be..ae8811c05c 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -1541,6 +1541,10 @@ class Airflow(AirflowBaseView):
         """Get rendered k8s yaml."""
         if not settings.IS_K8S_OR_K8SCELERY_EXECUTOR:
             abort(404)
+        # This part is only used for the k8s executor, so providers.cncf.kubernetes (which provides
+        # the get_rendered_k8s_spec method) must be installed
+        from airflow.providers.cncf.kubernetes.template_rendering import get_rendered_k8s_spec
+
         dag_id = request.args.get("dag_id")
         task_id = request.args.get("task_id")
         if task_id is None:
@@ -1560,7 +1564,7 @@ class Airflow(AirflowBaseView):
 
         pod_spec = None
         try:
-            pod_spec = ti.get_rendered_k8s_spec(session=session)
+            pod_spec = get_rendered_k8s_spec(ti, session=session)
         except AirflowException as e:
             if not e.__cause__:
                 flash(f"Error rendering Kubernetes POD Spec: {e}", "error")
diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
index fae38c0a53..b96829539a 100644
--- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
@@ -577,7 +577,7 @@ FROM {params.airflow_image_name_with_tag}
 
 COPY airflow/example_dags/ /opt/airflow/dags/
 
-COPY airflow/kubernetes_executor_templates/ /opt/airflow/pod_templates/
+COPY airflow/providers/cncf/kubernetes/kubernetes_executor_templates/ /opt/airflow/pod_templates/
 
 ENV GUNICORN_CMD_ARGS='--preload' AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL=0
 """
diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
index 0ef8ef65b9..019058eeff 100644
--- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py
+++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
@@ -27,6 +27,7 @@ PRE_COMMIT_LIST = [
     "black",
     "blacken-docs",
     "check-aiobotocore-optional",
+    "check-airflow-k8s-not-used",
     "check-airflow-provider-compatibility",
     "check-apache-license-rat",
     "check-base-operator-partial-arguments",
@@ -35,6 +36,7 @@ PRE_COMMIT_LIST = [
     "check-breeze-top-dependencies-limited",
     "check-builtin-literals",
     "check-changelog-has-no-duplicates",
+    "check-cncf-k8s-only-for-executors",
     "check-core-deprecation-classes",
     "check-daysago-import-from-utils",
     "check-decorated-operator-implements-custom-name",
diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py
index 7437642e4e..799a8a5ae5 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -1049,7 +1049,8 @@ def test_upgrade_to_newer_dependencies(files: tuple[str, ...], expected_outputs:
             ("airflow/providers/celery/file.py",),
             {
                 "docs-filter-list-as-string": "--package-filter apache-airflow "
-                "--package-filter apache-airflow-providers-celery",
+                "--package-filter apache-airflow-providers-celery "
+                "--package-filter apache-airflow-providers-cncf-kubernetes"
             },
             id="Celery python files changed",
         ),
diff --git a/docs/apache-airflow-providers-apache-spark/changelog.rst b/docs/apache-airflow-providers-apache-spark/changelog.rst
index 1a8c7b1c3e..36a0eb5118 100644
--- a/docs/apache-airflow-providers-apache-spark/changelog.rst
+++ b/docs/apache-airflow-providers-apache-spark/changelog.rst
@@ -16,6 +16,4 @@
     specific language governing permissions and limitations
     under the License.
 
-
-
 .. include:: ../../airflow/providers/apache/spark/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-apache-spark/commits.rst b/docs/apache-airflow-providers-apache-spark/commits.rst
index ba6baeae5a..6b2f878510 100644
--- a/docs/apache-airflow-providers-apache-spark/commits.rst
+++ b/docs/apache-airflow-providers-apache-spark/commits.rst
@@ -28,6 +28,21 @@ For high-level changelog, see :doc:`package information including changelog <ind
 
 
 
+4.1.2
+.....
+
+Latest change: 2023-07-06
+
+=================================================================================================  ===========  ===================================================================
+Commit                                                                                             Committed    Subject
+=================================================================================================  ===========  ===================================================================
+`225e3041d2 <https://github.com/apache/airflow/commit/225e3041d269698d0456e09586924c1898d09434>`_  2023-07-06   ``Prepare docs for July 2023 wave of Providers (RC2) (#32381)``
+`3878fe6fab <https://github.com/apache/airflow/commit/3878fe6fab3ccc1461932b456c48996f2763139f>`_  2023-07-05   ``Remove spurious headers for provider changelogs (#32373)``
+`cb4927a018 <https://github.com/apache/airflow/commit/cb4927a01887e2413c45d8d9cb63e74aa994ee74>`_  2023-07-05   ``Prepare docs for July 2023 wave of Providers (#32298)``
+`8c37b74a20 <https://github.com/apache/airflow/commit/8c37b74a208a808d905c1b86d081d69d7a1aa900>`_  2023-06-28   ``D205 Support - Providers: Apache to Common (inclusive) (#32226)``
+`09d4718d3a <https://github.com/apache/airflow/commit/09d4718d3a46aecf3355d14d3d23022002f4a818>`_  2023-06-27   ``Improve provider documentation and README structure (#32125)``
+=================================================================================================  ===========  ===================================================================
+
 4.1.1
 .....
 
@@ -36,6 +51,7 @@ Latest change: 2023-06-20
 =================================================================================================  ===========  =================================================================
 Commit                                                                                             Committed    Subject
 =================================================================================================  ===========  =================================================================
+`79bcc2e668 <https://github.com/apache/airflow/commit/79bcc2e668e648098aad6eaa87fe8823c76bc69a>`_  2023-06-20   ``Prepare RC1 docs for June 2023 wave of Providers (#32001)``
 `8b146152d6 <https://github.com/apache/airflow/commit/8b146152d62118defb3004c997c89c99348ef948>`_  2023-06-20   ``Add note about dropping Python 3.7 for providers (#32015)``
 `6becb70316 <https://github.com/apache/airflow/commit/6becb7031618867bc253aefc9e3e216629575d2d>`_  2023-06-16   ``SparkSubmitOperator: rename spark_conn_id to conn_id (#31952)``
 `13890788ae <https://github.com/apache/airflow/commit/13890788ae939328d451daeaea54f493f4aaaa69>`_  2023-06-07   ``Apache provider docstring improvements (#31730)``
diff --git a/docs/apache-airflow-providers-apache-spark/index.rst b/docs/apache-airflow-providers-apache-spark/index.rst
index 1bd8a1def9..300a89e540 100644
--- a/docs/apache-airflow-providers-apache-spark/index.rst
+++ b/docs/apache-airflow-providers-apache-spark/index.rst
@@ -76,7 +76,7 @@ Package apache-airflow-providers-apache-spark
 `Apache Spark <https://spark.apache.org/>`__
 
 
-Release: 4.1.1
+Release: 4.1.2
 
 Provider package
 ----------------
@@ -102,3 +102,31 @@ PIP package         Version required
 ``apache-airflow``  ``>=2.4.0``
 ``pyspark``
 ==================  ==================
+
+Cross provider package dependencies
+-----------------------------------
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified provider packages in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+.. code-block:: bash
+
+    pip install apache-airflow-providers-apache-spark[cncf.kubernetes]
+
+
+======================================================================================================================  ===================
+Dependent package                                                                                                       Extra
+======================================================================================================================  ===================
+`apache-airflow-providers-cncf-kubernetes <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes>`_  ``cncf.kubernetes``
+======================================================================================================================  ===================
+
+Downloading official packages
+-----------------------------
+
+You can download officially released packages and verify their checksums and signatures from the
+`Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
+
+* `The apache-airflow-providers-apache-spark 4.1.2 sdist package <https://downloads.apache.org/airflow/providers/apache-airflow-providers-apache-spark-4.1.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache-airflow-providers-apache-spark-4.1.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache-airflow-providers-apache-spark-4.1.2.tar.gz.sha512>`__)
+* `The apache-airflow-providers-apache-spark 4.1.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_spark-4.1.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_spark-4.1.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_spark-4.1.2-py3-none-any.whl.sha512>`__)
diff --git a/docs/apache-airflow-providers-celery/changelog.rst b/docs/apache-airflow-providers-celery/changelog.rst
index 301ee64f54..70dcac56ec 100644
--- a/docs/apache-airflow-providers-celery/changelog.rst
+++ b/docs/apache-airflow-providers-celery/changelog.rst
@@ -16,6 +16,4 @@
     specific language governing permissions and limitations
     under the License.
 
-
-
 .. include:: ../../airflow/providers/celery/CHANGELOG.rst
diff --git a/docs/apache-airflow-providers-celery/commits.rst b/docs/apache-airflow-providers-celery/commits.rst
index 760e1b64ae..66647f17f9 100644
--- a/docs/apache-airflow-providers-celery/commits.rst
+++ b/docs/apache-airflow-providers-celery/commits.rst
@@ -28,6 +28,26 @@ For high-level changelog, see :doc:`package information including changelog <ind
 
 
 
+3.3.0
+.....
+
+Latest change: 2023-07-22
+
+=================================================================================================  ===========  ===================================================================
+Commit                                                                                             Committed    Subject
+=================================================================================================  ===========  ===================================================================
+`56c41d460c <https://github.com/apache/airflow/commit/56c41d460c3f2a4e871c7834033c3152e71f71d2>`_  2023-07-22   ``Introduce decorator to load providers configuration (#32765)``
+`73b90c48b1 <https://github.com/apache/airflow/commit/73b90c48b1933b49086d34176527947bd727ec85>`_  2023-07-21   ``Allow configuration to be contributed by providers (#32604)``
+`ea0deaa993 <https://github.com/apache/airflow/commit/ea0deaa993674ad0e4ef777d687dc13809b0ec5d>`_  2023-07-16   ``Move default_celery.py to inside the provider (#32628)``
+`624211f33f <https://github.com/apache/airflow/commit/624211f33f30d0147b9daeb5913d2eb01861a842>`_  2023-07-12   ``Add Executors discovery and documentation (#32532)``
+`40d54eac1a <https://github.com/apache/airflow/commit/40d54eac1a2f35167bdd179fda3fd018fe32d116>`_  2023-07-12   ``Move CeleryExecutor to the celery provider (#32526)``
+`225e3041d2 <https://github.com/apache/airflow/commit/225e3041d269698d0456e09586924c1898d09434>`_  2023-07-06   ``Prepare docs for July 2023 wave of Providers (RC2) (#32381)``
+`3878fe6fab <https://github.com/apache/airflow/commit/3878fe6fab3ccc1461932b456c48996f2763139f>`_  2023-07-05   ``Remove spurious headers for provider changelogs (#32373)``
+`cb4927a018 <https://github.com/apache/airflow/commit/cb4927a01887e2413c45d8d9cb63e74aa994ee74>`_  2023-07-05   ``Prepare docs for July 2023 wave of Providers (#32298)``
+`8c37b74a20 <https://github.com/apache/airflow/commit/8c37b74a208a808d905c1b86d081d69d7a1aa900>`_  2023-06-28   ``D205 Support - Providers: Apache to Common (inclusive) (#32226)``
+`09d4718d3a <https://github.com/apache/airflow/commit/09d4718d3a46aecf3355d14d3d23022002f4a818>`_  2023-06-27   ``Improve provider documentation and README structure (#32125)``
+=================================================================================================  ===========  ===================================================================
+
 3.2.1
 .....
 
@@ -36,6 +56,7 @@ Latest change: 2023-06-20
 =================================================================================================  ===========  =============================================================
 Commit                                                                                             Committed    Subject
 =================================================================================================  ===========  =============================================================
+`79bcc2e668 <https://github.com/apache/airflow/commit/79bcc2e668e648098aad6eaa87fe8823c76bc69a>`_  2023-06-20   ``Prepare RC1 docs for June 2023 wave of Providers (#32001)``
 `8b146152d6 <https://github.com/apache/airflow/commit/8b146152d62118defb3004c997c89c99348ef948>`_  2023-06-20   ``Add note about dropping Python 3.7 for providers (#32015)``
 =================================================================================================  ===========  =============================================================
 
diff --git a/docs/apache-airflow-providers-celery/index.rst b/docs/apache-airflow-providers-celery/index.rst
index 464f12cd92..88a2a0ddd3 100644
--- a/docs/apache-airflow-providers-celery/index.rst
+++ b/docs/apache-airflow-providers-celery/index.rst
@@ -56,7 +56,7 @@ Package apache-airflow-providers-celery
 `Celery <http://www.celeryproject.org/>`__
 
 
-Release: 3.2.1
+Release: 3.3.0
 
 Provider package
 ----------------
@@ -83,3 +83,31 @@ PIP package         Version required
 ``celery``          ``>=5.2.3,<6``
 ``flower``          ``>=1.0.0``
 ==================  ==================
+
+Cross provider package dependencies
+-----------------------------------
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified provider packages in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+.. code-block:: bash
+
+    pip install apache-airflow-providers-celery[cncf.kubernetes]
+
+
+======================================================================================================================  ===================
+Dependent package                                                                                                       Extra
+======================================================================================================================  ===================
+`apache-airflow-providers-cncf-kubernetes <https://airflow.apache.org/docs/apache-airflow-providers-cncf-kubernetes>`_  ``cncf.kubernetes``
+======================================================================================================================  ===================
+
+Downloading official packages
+-----------------------------
+
+You can download officially released packages and verify their checksums and signatures from the
+`Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
+
+* `The apache-airflow-providers-celery 3.3.0 sdist package <https://downloads.apache.org/airflow/providers/apache-airflow-providers-celery-3.3.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache-airflow-providers-celery-3.3.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache-airflow-providers-celery-3.3.0.tar.gz.sha512>`__)
+* `The apache-airflow-providers-celery 3.3.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_celery-3.3.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_celery-3.3.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_celery-3.3.0-py3-none-any.whl.sha512>`__)
diff --git a/docs/apache-airflow-providers-celery/changelog.rst b/docs/apache-airflow-providers-cncf-kubernetes/configurations-ref.rst
similarity index 92%
copy from docs/apache-airflow-providers-celery/changelog.rst
copy to docs/apache-airflow-providers-cncf-kubernetes/configurations-ref.rst
index 301ee64f54..5885c9d91b 100644
--- a/docs/apache-airflow-providers-celery/changelog.rst
+++ b/docs/apache-airflow-providers-cncf-kubernetes/configurations-ref.rst
@@ -1,4 +1,3 @@
-
  .. Licensed to the Apache Software Foundation (ASF) under one
     or more contributor license agreements.  See the NOTICE file
     distributed with this work for additional information
@@ -16,6 +15,4 @@
     specific language governing permissions and limitations
     under the License.
 
-
-
-.. include:: ../../airflow/providers/celery/CHANGELOG.rst
+.. include:: ../exts/includes/providers-configurations-ref.rst
diff --git a/docs/apache-airflow-providers-cncf-kubernetes/index.rst b/docs/apache-airflow-providers-cncf-kubernetes/index.rst
index 95ce846ab1..9a19797d8f 100644
--- a/docs/apache-airflow-providers-cncf-kubernetes/index.rst
+++ b/docs/apache-airflow-providers-cncf-kubernetes/index.rst
@@ -42,6 +42,7 @@
     :maxdepth: 1
     :caption: References
 
+    Configuration <configurations-ref>
     Python API <_api/airflow/providers/cncf/kubernetes/index>
 
 .. toctree::
diff --git a/docs/apache-airflow-providers-cncf-kubernetes/operators.rst b/docs/apache-airflow-providers-cncf-kubernetes/operators.rst
index 2c020a53e0..7b7a77b4f7 100644
--- a/docs/apache-airflow-providers-cncf-kubernetes/operators.rst
+++ b/docs/apache-airflow-providers-cncf-kubernetes/operators.rst
@@ -97,7 +97,7 @@ like this:
 With this API object, you can have access to all Kubernetes API objects in the form of python classes.
 Using this method will ensure correctness
 and type safety. While we have removed almost all Kubernetes convenience classes, we have kept the
-:class:`~airflow.kubernetes.secret.Secret` class to simplify the process of generating secret volumes/env variables.
+:class:`~airflow.providers.cncf.kubernetes.secret.Secret` class to simplify the process of generating secret volumes/env variables.
 
 .. exampleinclude:: /../../tests/system/providers/cncf/kubernetes/example_kubernetes.py
     :language: python
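For context, a short sketch of the retained ``Secret`` helper with placeholder secret and key names; the resulting object can be passed to ``KubernetesPodOperator`` through its ``secrets`` argument:

    from airflow.providers.cncf.kubernetes.secret import Secret

    secret_env = Secret(
        deploy_type="env",          # expose as an environment variable in the pod
        deploy_target="SQL_CONN",   # name of the environment variable
        secret="airflow-secrets",   # Kubernetes Secret object to read from
        key="sql_alchemy_conn",     # key within that Secret
    )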
diff --git a/docs/apache-airflow/configurations-ref.rst b/docs/apache-airflow/configurations-ref.rst
index 7a32f1790d..c4882a8b90 100644
--- a/docs/apache-airflow/configurations-ref.rst
+++ b/docs/apache-airflow/configurations-ref.rst
@@ -37,6 +37,7 @@ in the provider's documentation. The pre-installed providers that you may want t
 
 * :doc:`Configuration Reference for Celery Provider <apache-airflow-providers-celery:configurations-ref>`
 * :doc:`Configuration Reference for Apache Hive Provider <apache-airflow-providers-apache-hive:configurations-ref>`
+* :doc:`Configuration Reference for CNCF Kubernetes Provider <apache-airflow-providers-cncf-kubernetes:configurations-ref>`
 
 .. note::
     For more information see :doc:`/howto/set-config`.
diff --git a/docs/apache-airflow/core-concepts/executor/celery.rst b/docs/apache-airflow/core-concepts/executor/celery.rst
index f69da1570f..1113bda530 100644
--- a/docs/apache-airflow/core-concepts/executor/celery.rst
+++ b/docs/apache-airflow/core-concepts/executor/celery.rst
@@ -21,6 +21,13 @@
 Celery Executor
 ===============
 
+.. note::
+
+    As of Airflow 2.7.0, you need to install the ``celery`` provider package to use this executor.
+    This can be done by installing ``apache-airflow-providers-celery>=3.3.0`` or by installing Airflow
+    with the ``celery`` extra: ``pip install 'apache-airflow[celery]'``.
+
+
 ``CeleryExecutor`` is one of the ways you can scale out the number of workers. For this
 to work, you need to setup a Celery backend (**RabbitMQ**, **Redis**, **Redis Sentinel** ...) and
 change your ``airflow.cfg`` to point the executor parameter to
diff --git a/docs/apache-airflow/core-concepts/executor/celery_kubernetes.rst b/docs/apache-airflow/core-concepts/executor/celery_kubernetes.rst
index 1eaf9bda58..6416f5d2fa 100644
--- a/docs/apache-airflow/core-concepts/executor/celery_kubernetes.rst
+++ b/docs/apache-airflow/core-concepts/executor/celery_kubernetes.rst
@@ -21,6 +21,14 @@
 CeleryKubernetes Executor
 =========================
 
+.. note::
+
+    As of Airflow 2.7.0, you need to install both the ``celery`` and ``cncf.kubernetes`` provider packages to use
+    this executor. This can be done by installing ``apache-airflow-providers-celery>=3.3.0`` and
+    ``apache-airflow-providers-cncf-kubernetes>=7.4.0`` or by installing Airflow
+    with the ``celery`` and ``cncf.kubernetes`` extras: ``pip install 'apache-airflow[celery,cncf.kubernetes]'``.
+
+
 The :class:`~airflow.providers.celery.executors.celery_kubernetes_executor.CeleryKubernetesExecutor` allows users
 to run simultaneously a ``CeleryExecutor`` and a ``KubernetesExecutor``.
 An executor is chosen to run a task based on the task's queue.
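A minimal routing sketch (task id and command are placeholders; it assumes the default ``kubernetes`` queue name): tasks sent to that queue run on the ``KubernetesExecutor``, everything else runs on the ``CeleryExecutor``:

    from airflow.operators.bash import BashOperator

    heavy_task = BashOperator(
        task_id="heavy_task",
        bash_command="echo 'this task runs in its own pod'",
        queue="kubernetes",  # matches the [celery_kubernetes_executor] kubernetes_queue option
    )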
diff --git a/docs/apache-airflow/core-concepts/executor/kubernetes.rst b/docs/apache-airflow/core-concepts/executor/kubernetes.rst
index c5361e3417..e19264c96e 100644
--- a/docs/apache-airflow/core-concepts/executor/kubernetes.rst
+++ b/docs/apache-airflow/core-concepts/executor/kubernetes.rst
@@ -21,6 +21,14 @@
 Kubernetes Executor
 ===================
 
+.. note::
+
+    As of Airflow 2.7.0, you need to install the ``cncf.kubernetes`` provider package to use
+    this executor. This can be done by installing ``apache-airflow-providers-cncf-kubernetes>=7.4.0``
+    or by installing Airflow with the ``cncf.kubernetes`` extra:
+    ``pip install 'apache-airflow[cncf.kubernetes]'``.
+
+
 The Kubernetes executor runs each task instance in its own pod on a Kubernetes cluster.
 
 KubernetesExecutor runs as a process in the Airflow Scheduler. The scheduler itself does
@@ -100,21 +108,21 @@ With these requirements in mind, here are some examples of basic ``pod_template_
 
 Storing DAGs in the image:
 
-.. literalinclude:: /../../airflow/kubernetes/pod_template_file_examples/dags_in_image_template.yaml
+.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_image_template.yaml
     :language: yaml
     :start-after: [START template_with_dags_in_image]
     :end-before: [END template_with_dags_in_image]
 
 Storing DAGs in a ``persistentVolume``:
 
-.. literalinclude:: /../../airflow/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml
+.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/dags_in_volume_template.yaml
     :language: yaml
     :start-after: [START template_with_dags_in_volume]
     :end-before: [END template_with_dags_in_volume]
 
 Pulling DAGs from ``git``:
 
-.. literalinclude:: /../../airflow/kubernetes/pod_template_file_examples/git_sync_template.yaml
+.. literalinclude:: /../../airflow/providers/cncf/kubernetes/pod_template_file_examples/git_sync_template.yaml
     :language: yaml
     :start-after:  [START git_sync_template]
     :end-before: [END git_sync_template]
diff --git a/docs/apache-airflow/core-concepts/executor/local_kubernetes.rst b/docs/apache-airflow/core-concepts/executor/local_kubernetes.rst
index ecb0dc5365..d50c7fc878 100644
--- a/docs/apache-airflow/core-concepts/executor/local_kubernetes.rst
+++ b/docs/apache-airflow/core-concepts/executor/local_kubernetes.rst
@@ -21,7 +21,14 @@
 LocalKubernetes Executor
 =========================
 
-The :class:`~airflow.executors.local_kubernetes_executor.LocalKubernetesExecutor` allows users
+.. note::
+
+    As of Airflow 2.7.0, you need to install the ``cncf.kubernetes`` provider package to use
+    this executor. This can be done by installing ``apache-airflow-providers-cncf-kubernetes>=7.4.0``
+    or by installing Airflow with the ``cncf.kubernetes`` extra:
+    ``pip install 'apache-airflow[cncf.kubernetes]'``.
+
+The :class:`~airflow.providers.cncf.kubernetes.executors.local_kubernetes_executor.LocalKubernetesExecutor` allows users
 to simultaneously run a ``LocalExecutor`` and a ``KubernetesExecutor``.
 An executor is chosen to run a task based on the task's queue.
 
diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst
index ef6c4bf1b2..a17bfa0736 100644
--- a/docs/apache-airflow/extra-packages-ref.rst
+++ b/docs/apache-airflow/extra-packages-ref.rst
@@ -37,47 +37,43 @@ These are core airflow extras that extend capabilities of core Airflow. They usu
 packages (with the exception of ``celery`` and ``cncf.kubernetes`` extras), they just install necessary
 python dependencies for the provided package.
 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| extra               | install command                                     | enables                                                                    | Preinstalled |
-+=====================+=====================================================+============================================================================+==============+
-| aiobotocore         | ``pip install 'apache-airflow[aiobotocore]'``       | Support for asynchronous (deferrable) operators for Amazon integration     |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                          |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| celery              | ``pip install 'apache-airflow[celery]'``            | CeleryExecutor (also installs the celery provider package!)                |      *       |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| cgroups             | ``pip install 'apache-airflow[cgroups]'``           | Needed To use CgroupTaskRunner                                             |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| cncf.kubernetes     | ``pip install 'apache-airflow[cncf.kubernetes]'``   | Kubernetes Executor (also installs the Kubernetes provider package)        |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| daskexecutor        | ``pip install 'apache-airflow[daskexecutor]'``      | DaskExecutor  ((also installs the Daskexecutor provider package)           |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| deprecated_api      | ``pip install 'apache-airflow[deprecated_api]'``    | Deprecated, experimental API that is replaced with the new REST API        |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| github_enterprise   | ``pip install 'apache-airflow[github_enterprise]'`` | GitHub Enterprise auth backend                                             |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| google_auth         | ``pip install 'apache-airflow[google_auth]'``       | Google auth backend                                                        |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| kerberos            | ``pip install 'apache-airflow[kerberos]'``          | Kerberos integration for Kerberized services (Hadoop, Presto, Trino)       |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| ldap                | ``pip install 'apache-airflow[ldap]'``              | LDAP authentication for users                                              |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| leveldb             | ``pip install 'apache-airflow[leveldb]'``           | Required for use leveldb extra in google provider                          |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| otel                | ``pip install 'apache-airflow[otel]'``              | Required for OpenTelemetry metrics                                         |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| pandas              | ``pip install 'apache-airflow[pandas]'``            | Install Pandas library compatible with Airflow                             |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| password            | ``pip install 'apache-airflow[password]'``          | Password authentication for users                                          |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                       |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| sentry              | ``pip install 'apache-airflow[sentry]'``            | Sentry service for application logging and monitoring                      |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                   |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
-| virtualenv          | ``pip install 'apache-airflow[virtualenv]'``        | Running python tasks in local virtualenv                                   |              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+--------------+
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| extra               | install command                                     | enables                                                                    |
++=====================+=====================================================+============================================================================+
+| aiobotocore         | ``pip install 'apache-airflow[aiobotocore]'``       | Support for asynchronous (deferrable) operators for Amazon integration     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                          |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| cgroups             | ``pip install 'apache-airflow[cgroups]'``           | Needed to use CgroupTaskRunner                                             |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| daskexecutor        | ``pip install 'apache-airflow[daskexecutor]'``      | DaskExecutor (also installs the Daskexecutor provider package)             |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| deprecated_api      | ``pip install 'apache-airflow[deprecated_api]'``    | Deprecated, experimental API that is replaced with the new REST API        |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| github_enterprise   | ``pip install 'apache-airflow[github_enterprise]'`` | GitHub Enterprise auth backend                                             |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| google_auth         | ``pip install 'apache-airflow[google_auth]'``       | Google auth backend                                                        |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| kerberos            | ``pip install 'apache-airflow[kerberos]'``          | Kerberos integration for Kerberized services (Hadoop, Presto, Trino)       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| ldap                | ``pip install 'apache-airflow[ldap]'``              | LDAP authentication for users                                              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| leveldb             | ``pip install 'apache-airflow[leveldb]'``           | Required to use the leveldb extra in google provider                       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| otel                | ``pip install 'apache-airflow[otel]'``              | Required for OpenTelemetry metrics                                         |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| pandas              | ``pip install 'apache-airflow[pandas]'``            | Install Pandas library compatible with Airflow                             |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| password            | ``pip install 'apache-airflow[password]'``          | Password authentication for users                                          |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| sentry              | ``pip install 'apache-airflow[sentry]'``            | Sentry service for application logging and monitoring                      |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                   |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| virtualenv          | ``pip install 'apache-airflow[virtualenv]'``        | Running python tasks in local virtualenv                                   |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
 
 
 Providers extras
@@ -188,6 +184,8 @@ These are extras that add dependencies needed for integration with external serv
 +---------------------+-----------------------------------------------------+-----------------------------------------------------+
 | facebook            | ``pip install 'apache-airflow[facebook]'``          | Facebook Social                                     |
 +---------------------+-----------------------------------------------------+-----------------------------------------------------+
+| github              | ``pip install 'apache-airflow[github]'``            | GitHub operators and hook                           |
++---------------------+-----------------------------------------------------+-----------------------------------------------------+
 | google              | ``pip install 'apache-airflow[google]'``            | Google Cloud                                        |
 +---------------------+-----------------------------------------------------+-----------------------------------------------------+
 | hashicorp           | ``pip install 'apache-airflow[hashicorp]'``         | Hashicorp Services (Vault)                          |
@@ -232,6 +230,10 @@ These are extras that add dependencies needed for integration with other softwar
 +=====================+=====================================================+===========================================+
 | arangodb            | ``pip install 'apache-airflow[arangodb]'``          | ArangoDB operators, sensors and hook      |
 +---------------------+-----------------------------------------------------+-------------------------------------------+
+| celery              | ``pip install 'apache-airflow[celery]'``            | CeleryExecutor                            |
++---------------------+-----------------------------------------------------+-------------------------------------------+
+| cncf.kubernetes     | ``pip install 'apache-airflow[cncf.kubernetes]'``   | Kubernetes Executor                       |
++---------------------+-----------------------------------------------------+-------------------------------------------+
 | docker              | ``pip install 'apache-airflow[docker]'``            | Docker hooks and operators                |
 +---------------------+-----------------------------------------------------+-------------------------------------------+
 | elasticsearch       | ``pip install 'apache-airflow[elasticsearch]'``     | Elasticsearch hooks and Log Handler       |
@@ -349,10 +351,10 @@ Those are the extras that are needed to generated documentation for Airflow. Thi
 +---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
 
 
-Deprecated extras
------------------
+Deprecated 1.10 extras
+----------------------
 
-These are the extras that have been used before and deprecated in 2.0 and will be removed in Airflow 3.0.0. They were
+These are the extras that have been deprecated in 2.0 and will be removed in Airflow 3.0.0. They were
 all replaced by new extras, which have naming consistent with the names of provider packages.
 
 The ``crypto`` extra is not needed any more, because all crypto dependencies are part of airflow package,
diff --git a/docs/apache-airflow/howto/upgrading-from-1-10/index.rst b/docs/apache-airflow/howto/upgrading-from-1-10/index.rst
index 782043f234..f1956d89b9 100644
--- a/docs/apache-airflow/howto/upgrading-from-1-10/index.rst
+++ b/docs/apache-airflow/howto/upgrading-from-1-10/index.rst
@@ -171,7 +171,7 @@ by using the ``| default`` Jinja filter as shown below.
 Much like the ``KubernetesExecutor``, the ``KubernetesPodOperator`` will no longer take Airflow custom classes and will
 instead expect either a pod_template yaml file, or ``kubernetes.client.models`` objects.
 
-The one notable exception is that we will continue to support the ``airflow.kubernetes.secret.Secret`` class.
+The one notable exception is that we will continue to support the ``airflow.providers.cncf.kubernetes.secret.Secret`` class.
 
 Whereas previously a user would import each individual class to build the pod as so:
 
@@ -218,7 +218,7 @@ Now the user can use the ``kubernetes.client.models`` class as a single point of
 .. code-block:: python
 
     from kubernetes.client import models as k8s
-    from airflow.kubernetes.secret import Secret
+    from airflow.providers.cncf.kubernetes.secret import Secret
 
 
     configmaps = ["test-configmap-1", "test-configmap-2"]
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 6bfca3ef37..fb721ada25 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -35,6 +35,7 @@ alibaba
 allAuthenticatedUsers
 allowinsert
 allUsers
+alphanumerics
 Alphasort
 amazonaws
 amqp
@@ -296,6 +297,7 @@ ContainerPort
 contentUrl
 contextmgr
 contrib
+CoreV
 coroutine
 coverals
 cp
@@ -678,6 +680,7 @@ Harenslak
 Hashable
 Hashicorp
 hashicorp
+hashlib
 hasn
 HCatalog
 hcatalog
@@ -1105,7 +1108,9 @@ pluggable
 pluggy
 plyvel
 png
+PodManager
 podName
+PodSpec
 podSpec
 podspec
 poller
diff --git a/generated/provider_dependencies.json b/generated/provider_dependencies.json
index f4eeb8a273..646b296444 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -197,7 +197,9 @@
       "apache-airflow>=2.4.0",
       "pyspark"
     ],
-    "cross-providers-deps": [],
+    "cross-providers-deps": [
+      "cncf.kubernetes"
+    ],
     "excluded-python-versions": []
   },
   "apache.sqoop": {
@@ -245,7 +247,9 @@
       "celery>=5.2.3,<6",
       "flower>=1.0.0"
     ],
-    "cross-providers-deps": [],
+    "cross-providers-deps": [
+      "cncf.kubernetes"
+    ],
     "excluded-python-versions": []
   },
   "cloudant": {
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index 540a5938a0..c0ccd98d5a 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -62,7 +62,7 @@ setup:version:be116d90a21c2afe01087f7609774e1e
 setup:cdf58a9c27af2877dc5a658ed0a1d99c
 shell:87e7bdcebe1180395adfec86b2a065f5
 start-airflow:f7216a8126ecf14b033e2ea677e1a105
-static-checks:f9ec0d7edaba84180403d95469d94ea0
+static-checks:a6b76083afc49c0c80270facd5236b42
 testing:docker-compose-tests:0c810047fc66a0cfe91119e2d08b3507
 testing:helm-tests:8e491da2e01ebd815322c37562059d77
 testing:integration-tests:486e4d91449ecdb7630ef2a470d705a3
diff --git a/images/breeze/output_static-checks.svg b/images/breeze/output_static-checks.svg
index 6220e1db62..083d5f6681 100644
--- a/images/breeze/output_static-checks.svg
+++ b/images/breeze/output_static-checks.svg
@@ -1,4 +1,4 @@
-<svg class="rich-terminal" viewBox="0 0 1482 1855.6" xmlns="http://www.w3.org/2000/svg">
+<svg class="rich-terminal" viewBox="0 0 1482 1880.0" xmlns="http://www.w3.org/2000/svg">
     <!-- Generated with Rich https://www.textualize.io -->
     <style>
 
@@ -43,7 +43,7 @@
 
     <defs>
     <clipPath id="breeze-static-checks-clip-terminal">
-      <rect x="0" y="0" width="1463.0" height="1804.6" />
+      <rect x="0" y="0" width="1463.0" height="1829.0" />
     </clipPath>
     <clipPath id="breeze-static-checks-line-0">
     <rect x="0" y="1.5" width="1464" height="24.65"/>
@@ -264,9 +264,12 @@
 <clipPath id="breeze-static-checks-line-72">
     <rect x="0" y="1758.3" width="1464" height="24.65"/>
             </clipPath>
+<clipPath id="breeze-static-checks-line-73">
+    <rect x="0" y="1782.7" width="1464" height="24.65"/>
+            </clipPath>
     </defs>
 
-    <rect fill="#292929" stroke="rgba(255,255,255,0.35)" stroke-width="1" x="1" y="1" width="1480" height="1853.6" rx="8"/><text class="breeze-static-checks-title" fill="#c5c8c6" text-anchor="middle" x="740" y="27">Command:&#160;static-checks</text>
+    <rect fill="#292929" stroke="rgba(255,255,255,0.35)" stroke-width="1" x="1" y="1" width="1480" height="1878" rx="8"/><text class="breeze-static-checks-title" fill="#c5c8c6" text-anchor="middle" x="740" y="27">Command:&#160;static-checks</text>
             <g transform="translate(26,22)">
             <circle cx="0" cy="0" r="7" fill="#ff5f57"/>
             <circle cx="22" cy="0" r="7" fill="#febc2e"/>
@@ -284,72 +287,73 @@
[Remaining SVG <text> lines truncated by the mailing-list archive: the hunk re-renders the list of available pre-commit checks in the ``breeze static-checks`` help image to reflect the additions above.]
-</text><text class="breeze-static-checks-r4" x="0" y="800.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-32)">│</text><text class="breeze-static-checks-r7" x="451.4" y="800.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-32)">doctoc&#160;|&#160;end-of-file-fixer&#160;|&#160;fix-encoding-pragma&#160;|&#160;flynt&#160;|&#160;identity&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x=" [...]
-</text><text class="breeze-static-checks-r4" x="0" y="825.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-33)">│</text><text class="breeze-static-checks-r7" x="451.4" y="825.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-33)">insert-license&#160;|&#160;lint-chart-schema&#160;|&#160;lint-css&#160;|&#160;lint-dockerfile&#160;|&#160;lint-helm-chart</text><text class="breeze-static-checks-r4" x="1451.8" y="825.2" textLength="12.2" clip-path="url(#breeze-static [...]
-</text><text class="breeze-static-checks-r4" x="0" y="849.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-34)">│</text><text class="breeze-static-checks-r7" x="451.4" y="849.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-34)">|&#160;lint-json-schema&#160;|&#160;lint-markdown&#160;|&#160;lint-openapi&#160;|&#160;mixed-line-ending&#160;|&#160;mypy-core</text><text class="breeze-static-checks-r4" x="1451.8" y="849.6" textLength="12.2" clip-path="url(#breeze-s [...]
-</text><text class="breeze-static-checks-r4" x="0" y="874" textLength="12.2" clip-path="url(#breeze-static-checks-line-35)">│</text><text class="breeze-static-checks-r7" x="451.4" y="874" textLength="988.2" clip-path="url(#breeze-static-checks-line-35)">|&#160;mypy-dev&#160;|&#160;mypy-docs&#160;|&#160;mypy-providers&#160;|&#160;pretty-format-json&#160;|&#160;python-no-log-warn</text><text class="breeze-static-checks-r4" x="1451.8" y="874" textLength="12.2" clip-path="url(#breeze-static- [...]
-</text><text class="breeze-static-checks-r4" x="0" y="898.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-36)">│</text><text class="breeze-static-checks-r7" x="451.4" y="898.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-36)">|&#160;replace-bad-characters&#160;|&#160;rst-backticks&#160;|&#160;ruff&#160;|&#160;shellcheck&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class [...]
-</text><text class="breeze-static-checks-r4" x="0" y="922.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-37)">│</text><text class="breeze-static-checks-r7" x="451.4" y="922.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-37)">trailing-whitespace&#160;|&#160;ts-compile-format-lint-www&#160;|&#160;update-black-version&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="922.8" textLength="12.2" clip [...]
-</text><text class="breeze-static-checks-r4" x="0" y="947.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-38)">│</text><text class="breeze-static-checks-r7" x="451.4" y="947.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-38)">update-breeze-cmd-output&#160;|&#160;update-breeze-readme-config-hash&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-chec [...]
-</text><text class="breeze-static-checks-r4" x="0" y="971.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-39)">│</text><text class="breeze-static-checks-r7" x="451.4" y="971.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-39)">update-common-sql-api-stubs&#160;|&#160;update-er-diagram&#160;|&#160;update-extras&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x=" [...]
-</text><text class="breeze-static-checks-r4" x="0" y="996" textLength="12.2" clip-path="url(#breeze-static-checks-line-40)">│</text><text class="breeze-static-checks-r7" x="451.4" y="996" textLength="988.2" clip-path="url(#breeze-static-checks-line-40)">update-in-the-wild-to-be-sorted&#160;|&#160;update-inlined-dockerfile-scripts&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="996" textLength="12.2"  [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1020.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-41)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1020.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-41)">update-installed-providers-to-be-sorted&#160;|&#160;update-local-yml-file&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8"  [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1044.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-42)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1044.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-42)">update-migration-references&#160;|&#160;update-providers-dependencies&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-ch [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1069.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-43)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1069.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-43)">update-spelling-wordlist-to-be-sorted&#160;|&#160;update-supported-versions&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="1069.2" [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1093.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-44)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1093.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-44)">update-vendored-in-k8s-json-schema&#160;|&#160;update-version&#160;|&#160;yamllint)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks- [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1118" textLength="12.2" clip-path="url(#breeze-static-checks-line-45)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1118" textLength="12.2" clip-path="url(#breeze-static-checks-line-45)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1118" textLength="61" clip-path="url(#breeze-static-checks-line-45)">-show</text><text class="breeze-static-checks-r5" x="97.6" y="1118" textLength="195.2" clip-path="url(#breeze-s [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1142.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-46)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1142.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-46)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1142.4" textLength="134.2" clip-path="url(#breeze-static-checks-line-46)">-initialize</text><text class="breeze-static-checks-r5" x="170.8" y="1142.4" textLength="146.4" clip-p [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1166.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-47)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1166.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-47)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1166.8" textLength="48.8" clip-path="url(#breeze-static-checks-line-47)">-max</text><text class="breeze-static-checks-r5" x="85.4" y="1166.8" textLength="292.8" clip-path="url( [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1191.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-48)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1191.2" textLength="854" clip-path="url(#breeze-static-checks-line-48)">(INTEGER&#160;RANGE)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1215.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-49)">│</text><text class="breeze-static-checks-r4" x="451.4" y="1215.6" textLength="854" clip-path="url(#breeze-static-checks-line-49)">[default:&#160;3;&#160;1&lt;=x&lt;=10]&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1240" textLength="1464" clip-path="url(#breeze-static-checks-line-50)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="breeze-static-checks-r2" x="1464" y="1240" textLength="12.2" clip-path="url(#breeze-static-checks-line-50)">
-</text><text class="breeze-static-checks-r4" x="0" y="1264.4" textLength="24.4" clip-path="url(#breeze-static-checks-line-51)">╭─</text><text class="breeze-static-checks-r4" x="24.4" y="1264.4" textLength="463.6" clip-path="url(#breeze-static-checks-line-51)">&#160;Selecting&#160;files&#160;to&#160;run&#160;the&#160;checks&#160;on&#160;</text><text class="breeze-static-checks-r4" x="488" y="1264.4" textLength="951.6" clip-path="url(#breeze-static-checks-line-51)">──────────────────────── [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1288.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-52)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1288.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-52)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1288.8" textLength="61" clip-path="url(#breeze-static-checks-line-52)">-file</text><text class="breeze-static-checks-r6" x="256.2" y="1288.8" textLength="24.4" clip-path="url(# [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1313.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-53)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1313.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-53)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1313.2" textLength="48.8" clip-path="url(#breeze-static-checks-line-53)">-all</text><text class="breeze-static-checks-r5" x="85.4" y="1313.2" textLength="73.2" clip-path="url(# [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1337.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-54)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1337.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-54)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1337.6" textLength="85.4" clip-path="url(#breeze-static-checks-line-54)">-commit</text><text class="breeze-static-checks-r5" x="122" y="1337.6" textLength="48.8" clip-path="url [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1362" textLength="12.2" clip-path="url(#breeze-static-checks-line-55)">│</text><text class="breeze-static-checks-r2" x="305" y="1362" textLength="183" clip-path="url(#breeze-static-checks-line-55)">exclusive&#160;with&#160;</text><text class="breeze-static-checks-r5" x="488" y="1362" textLength="12.2" clip-path="url(#breeze-static-checks-line-55)">-</text><text class="breeze-static-checks-r5" x="500.2" y="1362" textLength="61" clip-pa [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1386.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-56)">│</text><text class="breeze-static-checks-r7" x="305" y="1386.4" textLength="1134.6" clip-path="url(#breeze-static-checks-line-56)">(TEXT)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160 [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1410.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-57)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1410.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-57)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1410.8" textLength="61" clip-path="url(#breeze-static-checks-line-57)">-last</text><text class="breeze-static-checks-r5" x="97.6" y="1410.8" textLength="85.4" clip-path="url(#b [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1435.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-58)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1435.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-58)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1435.2" textLength="61" clip-path="url(#breeze-static-checks-line-58)">-only</text><text class="breeze-static-checks-r5" x="97.6" y="1435.2" textLength="134.2" clip-path="url(# [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1459.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-59)">│</text><text class="breeze-static-checks-r2" x="305" y="1459.6" textLength="1134.6" clip-path="url(#breeze-static-checks-line-59)">branch&#160;and&#160;HEAD&#160;of&#160;your&#160;branch.&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#1 [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1484" textLength="1464" clip-path="url(#breeze-static-checks-line-60)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="breeze-static-checks-r2" x="1464" y="1484" textLength="12.2" clip-path="url(#breeze-static-checks-line-60)">
-</text><text class="breeze-static-checks-r4" x="0" y="1508.4" textLength="24.4" clip-path="url(#breeze-static-checks-line-61)">╭─</text><text class="breeze-static-checks-r4" x="24.4" y="1508.4" textLength="463.6" clip-path="url(#breeze-static-checks-line-61)">&#160;Building&#160;image&#160;before&#160;running&#160;checks&#160;</text><text class="breeze-static-checks-r4" x="488" y="1508.4" textLength="951.6" clip-path="url(#breeze-static-checks-line-61)">────────────────────────────────── [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1532.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-62)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1532.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-62)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1532.8" textLength="61" clip-path="url(#breeze-static-checks-line-62)">-skip</text><text class="breeze-static-checks-r5" x="97.6" y="1532.8" textLength="146.4" clip-path="url(# [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1557.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-63)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1557.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-63)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1557.2" textLength="73.2" clip-path="url(#breeze-static-checks-line-63)">-force</text><text class="breeze-static-checks-r5" x="109.8" y="1557.2" textLength="73.2" clip-path="ur [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1581.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-64)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1581.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-64)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1581.6" textLength="73.2" clip-path="url(#breeze-static-checks-line-64)">-image</text><text class="breeze-static-checks-r5" x="109.8" y="1581.6" textLength="48.8" clip-path="ur [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1606" textLength="12.2" clip-path="url(#breeze-static-checks-line-65)">│</text><text class="breeze-static-checks-r4" x="329.4" y="1606" textLength="963.8" clip-path="url(#breeze-static-checks-line-65)">[default:&#160;latest]&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1630.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-66)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1630.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-66)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1630.4" textLength="85.4" clip-path="url(#breeze-static-checks-line-66)">-github</text><text class="breeze-static-checks-r5" x="122" y="1630.4" textLength="134.2" clip-path="ur [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1654.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-67)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1654.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-67)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1654.8" textLength="97.6" clip-path="url(#breeze-static-checks-line-67)">-builder</text><text class="breeze-static-checks-r2" x="329.4" y="1654.8" textLength="756.4" clip-path= [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1679.2" textLength="1464" clip-path="url(#breeze-static-checks-line-68)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="breeze-static-checks-r2" x="1464" y="1679.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-68)">
-</text><text class="breeze-static-checks-r4" x="0" y="1703.6" textLength="24.4" clip-path="url(#breeze-static-checks-line-69)">╭─</text><text class="breeze-static-checks-r4" x="24.4" y="1703.6" textLength="195.2" clip-path="url(#breeze-static-checks-line-69)">&#160;Common&#160;options&#160;</text><text class="breeze-static-checks-r4" x="219.6" y="1703.6" textLength="1220" clip-path="url(#breeze-static-checks-line-69)">────────────────────────────────────────────────────────────────────── [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1728" textLength="12.2" clip-path="url(#breeze-static-checks-line-70)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1728" textLength="12.2" clip-path="url(#breeze-static-checks-line-70)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1728" textLength="97.6" clip-path="url(#breeze-static-checks-line-70)">-verbose</text><text class="breeze-static-checks-r6" x="158.6" y="1728" textLength="24.4" clip-path="url(#bre [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1752.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-71)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1752.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-71)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1752.4" textLength="48.8" clip-path="url(#breeze-static-checks-line-71)">-dry</text><text class="breeze-static-checks-r5" x="85.4" y="1752.4" textLength="48.8" clip-path="url(# [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1776.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-72)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1776.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-72)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1776.8" textLength="61" clip-path="url(#breeze-static-checks-line-72)">-help</text><text class="breeze-static-checks-r6" x="158.6" y="1776.8" textLength="24.4" clip-path="url(# [...]
-</text><text class="breeze-static-checks-r4" x="0" y="1801.2" textLength="1464" clip-path="url(#breeze-static-checks-line-73)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="breeze-static-checks-r2" x="1464" y="1801.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-73)">
+</text><text class="breeze-static-checks-r4" x="0" y="215.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-8)">│</text><text class="breeze-static-checks-r7" x="451.4" y="215.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-8)">check-airflow-k8s-not-used&#160;|&#160;check-airflow-provider-compatibility&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="215.2" text [...]
+</text><text class="breeze-static-checks-r4" x="0" y="239.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-9)">│</text><text class="breeze-static-checks-r7" x="451.4" y="239.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-9)">check-apache-license-rat&#160;|&#160;check-base-operator-partial-arguments&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="239.6" [...]
+</text><text class="breeze-static-checks-r4" x="0" y="264" textLength="12.2" clip-path="url(#breeze-static-checks-line-10)">│</text><text class="breeze-static-checks-r7" x="451.4" y="264" textLength="988.2" clip-path="url(#breeze-static-checks-line-10)">check-base-operator-usage&#160;|&#160;check-boring-cyborg-configuration&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451 [...]
+</text><text class="breeze-static-checks-r4" x="0" y="288.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-11)">│</text><text class="breeze-static-checks-r7" x="451.4" y="288.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-11)">check-breeze-top-dependencies-limited&#160;|&#160;check-builtin-literals&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451. [...]
+</text><text class="breeze-static-checks-r4" x="0" y="312.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-12)">│</text><text class="breeze-static-checks-r7" x="451.4" y="312.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-12)">check-changelog-has-no-duplicates&#160;|&#160;check-cncf-k8s-only-for-executors&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="312.8" textLength="12.2" clip [...]
+</text><text class="breeze-static-checks-r4" x="0" y="337.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-13)">│</text><text class="breeze-static-checks-r7" x="451.4" y="337.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-13)">check-core-deprecation-classes&#160;|&#160;check-daysago-import-from-utils&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="337. [...]
+</text><text class="breeze-static-checks-r4" x="0" y="361.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-14)">│</text><text class="breeze-static-checks-r7" x="451.4" y="361.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-14)">check-decorated-operator-implements-custom-name&#160;|&#160;check-deferrable-default-value&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="361.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-14)">│</text [...]
+</text><text class="breeze-static-checks-r4" x="0" y="386" textLength="12.2" clip-path="url(#breeze-static-checks-line-15)">│</text><text class="breeze-static-checks-r7" x="451.4" y="386" textLength="988.2" clip-path="url(#breeze-static-checks-line-15)">|&#160;check-docstring-param-types&#160;|&#160;check-example-dags-urls&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text clas [...]
+</text><text class="breeze-static-checks-r4" x="0" y="410.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-16)">│</text><text class="breeze-static-checks-r7" x="451.4" y="410.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-16)">check-executables-have-shebangs&#160;|&#160;check-extra-packages-references&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="410.4" te [...]
+</text><text class="breeze-static-checks-r4" x="0" y="434.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-17)">│</text><text class="breeze-static-checks-r7" x="451.4" y="434.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-17)">check-extras-order&#160;|&#160;check-for-inclusive-language&#160;|&#160;check-hooks-apply&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="434.8" textLength=" [...]
+</text><text class="breeze-static-checks-r4" x="0" y="459.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-18)">│</text><text class="breeze-static-checks-r7" x="451.4" y="459.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-18)">check-incorrect-use-of-LoggingMixin&#160;|&#160;check-init-decorator-arguments&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="459.2" textLength="12.2" [...]
+</text><text class="breeze-static-checks-r4" x="0" y="483.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-19)">│</text><text class="breeze-static-checks-r7" x="451.4" y="483.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-19)">check-lazy-logging&#160;|&#160;check-links-to-example-dags-do-not-use-hardcoded-versions&#160;|&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="483.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-19)">│< [...]
+</text><text class="breeze-static-checks-r4" x="0" y="508" textLength="12.2" clip-path="url(#breeze-static-checks-line-20)">│</text><text class="breeze-static-checks-r7" x="451.4" y="508" textLength="988.2" clip-path="url(#breeze-static-checks-line-20)">check-merge-conflict&#160;|&#160;check-newsfragments-are-valid&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text> [...]
+</text><text class="breeze-static-checks-r4" x="0" y="532.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-21)">│</text><text class="breeze-static-checks-r7" x="451.4" y="532.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-21)">check-no-airflow-deprecation-in-providers&#160;|&#160;check-no-providers-in-core-examples&#160;|</text><text class="breeze-static-checks-r4" x="1451.8" y="532.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-21)">│</text [...]
+</text><text class="breeze-static-checks-r4" x="0" y="556.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-22)">│</text><text class="breeze-static-checks-r7" x="451.4" y="556.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-22)">check-no-relative-imports&#160;|&#160;check-only-new-session-with-provide-session&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="556.8" textLength="12.2" clip-path="url [...]
+</text><text class="breeze-static-checks-r4" x="0" y="581.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-23)">│</text><text class="breeze-static-checks-r7" x="451.4" y="581.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-23)">check-persist-credentials-disabled-in-github-workflows&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="bre [...]
+</text><text class="breeze-static-checks-r4" x="0" y="605.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-24)">│</text><text class="breeze-static-checks-r7" x="451.4" y="605.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-24)">check-pre-commit-information-consistent&#160;|&#160;check-provide-create-sessions-imports&#160;|</text><text class="breeze-static-checks-r4" x="1451.8" y="605.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-24)">│</text [...]
+</text><text class="breeze-static-checks-r4" x="0" y="630" textLength="12.2" clip-path="url(#breeze-static-checks-line-25)">│</text><text class="breeze-static-checks-r7" x="451.4" y="630" textLength="988.2" clip-path="url(#breeze-static-checks-line-25)">check-provider-yaml-valid&#160;|&#160;check-providers-init-file-missing&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451 [...]
+</text><text class="breeze-static-checks-r4" x="0" y="654.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-26)">│</text><text class="breeze-static-checks-r7" x="451.4" y="654.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-26)">check-providers-subpackages-init-file-exist&#160;|&#160;check-pydevd-left-in-code&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="654.4" textLength="12.2" clip-path="url [...]
+</text><text class="breeze-static-checks-r4" x="0" y="678.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-27)">│</text><text class="breeze-static-checks-r7" x="451.4" y="678.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-27)">check-revision-heads-map&#160;|&#160;check-safe-filter-usage-in-html&#160;|&#160;check-setup-order&#160;|&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="678.8" textLength="12.2" clip-path="url(#breeze-static-checks-li [...]
+</text><text class="breeze-static-checks-r4" x="0" y="703.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-28)">│</text><text class="breeze-static-checks-r7" x="451.4" y="703.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-28)">check-start-date-not-used-in-defaults&#160;|&#160;check-system-tests-present&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="703.2" textLen [...]
+</text><text class="breeze-static-checks-r4" x="0" y="727.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-29)">│</text><text class="breeze-static-checks-r7" x="451.4" y="727.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-29)">check-system-tests-tocs&#160;|&#160;check-tests-unittest-testcase&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class [...]
+</text><text class="breeze-static-checks-r4" x="0" y="752" textLength="12.2" clip-path="url(#breeze-static-checks-line-30)">│</text><text class="breeze-static-checks-r7" x="451.4" y="752" textLength="988.2" clip-path="url(#breeze-static-checks-line-30)">check-urlparse-usage-in-code&#160;|&#160;check-usage-of-re2-over-re&#160;|&#160;check-xml&#160;|&#160;codespell</text><text class="breeze-static-checks-r4" x="1451.8" y="752" textLength="12.2" clip-path="url(#breeze-static-checks-line-30) [...]
+</text><text class="breeze-static-checks-r4" x="0" y="776.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-31)">│</text><text class="breeze-static-checks-r7" x="451.4" y="776.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-31)">|&#160;compile-www-assets&#160;|&#160;compile-www-assets-dev&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160 [...]
+</text><text class="breeze-static-checks-r4" x="0" y="800.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-32)">│</text><text class="breeze-static-checks-r7" x="451.4" y="800.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-32)">create-missing-init-py-files-tests&#160;|&#160;debug-statements&#160;|&#160;detect-private-key&#160;|&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="800.8" textLength="12.2" clip-path="url(#bre [...]
+</text><text class="breeze-static-checks-r4" x="0" y="825.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-33)">│</text><text class="breeze-static-checks-r7" x="451.4" y="825.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-33)">doctoc&#160;|&#160;end-of-file-fixer&#160;|&#160;fix-encoding-pragma&#160;|&#160;flynt&#160;|&#160;identity&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x=" [...]
+</text><text class="breeze-static-checks-r4" x="0" y="849.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-34)">│</text><text class="breeze-static-checks-r7" x="451.4" y="849.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-34)">insert-license&#160;|&#160;lint-chart-schema&#160;|&#160;lint-css&#160;|&#160;lint-dockerfile&#160;|&#160;lint-helm-chart</text><text class="breeze-static-checks-r4" x="1451.8" y="849.6" textLength="12.2" clip-path="url(#breeze-static [...]
+</text><text class="breeze-static-checks-r4" x="0" y="874" textLength="12.2" clip-path="url(#breeze-static-checks-line-35)">│</text><text class="breeze-static-checks-r7" x="451.4" y="874" textLength="988.2" clip-path="url(#breeze-static-checks-line-35)">|&#160;lint-json-schema&#160;|&#160;lint-markdown&#160;|&#160;lint-openapi&#160;|&#160;mixed-line-ending&#160;|&#160;mypy-core</text><text class="breeze-static-checks-r4" x="1451.8" y="874" textLength="12.2" clip-path="url(#breeze-static- [...]
+</text><text class="breeze-static-checks-r4" x="0" y="898.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-36)">│</text><text class="breeze-static-checks-r7" x="451.4" y="898.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-36)">|&#160;mypy-dev&#160;|&#160;mypy-docs&#160;|&#160;mypy-providers&#160;|&#160;pretty-format-json&#160;|&#160;python-no-log-warn</text><text class="breeze-static-checks-r4" x="1451.8" y="898.4" textLength="12.2" clip-path="url(#breeze-s [...]
+</text><text class="breeze-static-checks-r4" x="0" y="922.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-37)">│</text><text class="breeze-static-checks-r7" x="451.4" y="922.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-37)">|&#160;replace-bad-characters&#160;|&#160;rst-backticks&#160;|&#160;ruff&#160;|&#160;shellcheck&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class [...]
+</text><text class="breeze-static-checks-r4" x="0" y="947.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-38)">│</text><text class="breeze-static-checks-r7" x="451.4" y="947.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-38)">trailing-whitespace&#160;|&#160;ts-compile-format-lint-www&#160;|&#160;update-black-version&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="947.2" textLength="12.2" clip [...]
+</text><text class="breeze-static-checks-r4" x="0" y="971.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-39)">│</text><text class="breeze-static-checks-r7" x="451.4" y="971.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-39)">update-breeze-cmd-output&#160;|&#160;update-breeze-readme-config-hash&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-chec [...]
+</text><text class="breeze-static-checks-r4" x="0" y="996" textLength="12.2" clip-path="url(#breeze-static-checks-line-40)">│</text><text class="breeze-static-checks-r7" x="451.4" y="996" textLength="988.2" clip-path="url(#breeze-static-checks-line-40)">update-common-sql-api-stubs&#160;|&#160;update-er-diagram&#160;|&#160;update-extras&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451 [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1020.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-41)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1020.4" textLength="988.2" clip-path="url(#breeze-static-checks-line-41)">update-in-the-wild-to-be-sorted&#160;|&#160;update-inlined-dockerfile-scripts&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="1020.4" textLengt [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1044.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-42)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1044.8" textLength="988.2" clip-path="url(#breeze-static-checks-line-42)">update-installed-providers-to-be-sorted&#160;|&#160;update-local-yml-file&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8"  [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1069.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-43)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1069.2" textLength="988.2" clip-path="url(#breeze-static-checks-line-43)">update-migration-references&#160;|&#160;update-providers-dependencies&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-ch [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1093.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-44)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1093.6" textLength="988.2" clip-path="url(#breeze-static-checks-line-44)">update-spelling-wordlist-to-be-sorted&#160;|&#160;update-supported-versions&#160;|&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4" x="1451.8" y="1093.6" [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1118" textLength="12.2" clip-path="url(#breeze-static-checks-line-45)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1118" textLength="988.2" clip-path="url(#breeze-static-checks-line-45)">update-vendored-in-k8s-json-schema&#160;|&#160;update-version&#160;|&#160;yamllint)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;</text><text class="breeze-static-checks-r4"  [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1142.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-46)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1142.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-46)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1142.4" textLength="61" clip-path="url(#breeze-static-checks-line-46)">-show</text><text class="breeze-static-checks-r5" x="97.6" y="1142.4" textLength="195.2" clip-path="url(# [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1166.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-47)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1166.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-47)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1166.8" textLength="134.2" clip-path="url(#breeze-static-checks-line-47)">-initialize</text><text class="breeze-static-checks-r5" x="170.8" y="1166.8" textLength="146.4" clip-p [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1191.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-48)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1191.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-48)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1191.2" textLength="48.8" clip-path="url(#breeze-static-checks-line-48)">-max</text><text class="breeze-static-checks-r5" x="85.4" y="1191.2" textLength="292.8" clip-path="url( [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1215.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-49)">│</text><text class="breeze-static-checks-r7" x="451.4" y="1215.6" textLength="854" clip-path="url(#breeze-static-checks-line-49)">(INTEGER&#160;RANGE)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#16 [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1240" textLength="12.2" clip-path="url(#breeze-static-checks-line-50)">│</text><text class="breeze-static-checks-r4" x="451.4" y="1240" textLength="854" clip-path="url(#breeze-static-checks-line-50)">[default:&#160;3;&#160;1&lt;=x&lt;=10]&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&# [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1264.4" textLength="1464" clip-path="url(#breeze-static-checks-line-51)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="breeze-static-checks-r2" x="1464" y="1264.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-51)">
+</text><text class="breeze-static-checks-r4" x="0" y="1288.8" textLength="24.4" clip-path="url(#breeze-static-checks-line-52)">╭─</text><text class="breeze-static-checks-r4" x="24.4" y="1288.8" textLength="463.6" clip-path="url(#breeze-static-checks-line-52)">&#160;Selecting&#160;files&#160;to&#160;run&#160;the&#160;checks&#160;on&#160;</text><text class="breeze-static-checks-r4" x="488" y="1288.8" textLength="951.6" clip-path="url(#breeze-static-checks-line-52)">──────────────────────── [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1313.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-53)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1313.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-53)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1313.2" textLength="61" clip-path="url(#breeze-static-checks-line-53)">-file</text><text class="breeze-static-checks-r6" x="256.2" y="1313.2" textLength="24.4" clip-path="url(# [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1337.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-54)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1337.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-54)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1337.6" textLength="48.8" clip-path="url(#breeze-static-checks-line-54)">-all</text><text class="breeze-static-checks-r5" x="85.4" y="1337.6" textLength="73.2" clip-path="url(# [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1362" textLength="12.2" clip-path="url(#breeze-static-checks-line-55)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1362" textLength="12.2" clip-path="url(#breeze-static-checks-line-55)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1362" textLength="85.4" clip-path="url(#breeze-static-checks-line-55)">-commit</text><text class="breeze-static-checks-r5" x="122" y="1362" textLength="48.8" clip-path="url(#breeze [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1386.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-56)">│</text><text class="breeze-static-checks-r2" x="305" y="1386.4" textLength="183" clip-path="url(#breeze-static-checks-line-56)">exclusive&#160;with&#160;</text><text class="breeze-static-checks-r5" x="488" y="1386.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-56)">-</text><text class="breeze-static-checks-r5" x="500.2" y="1386.4" textLength="61" [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1410.8" textLength="12.2" clip-path="url(#breeze-static-checks-line-57)">│</text><text class="breeze-static-checks-r7" x="305" y="1410.8" textLength="1134.6" clip-path="url(#breeze-static-checks-line-57)">(TEXT)&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160 [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1435.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-58)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1435.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-58)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1435.2" textLength="61" clip-path="url(#breeze-static-checks-line-58)">-last</text><text class="breeze-static-checks-r5" x="97.6" y="1435.2" textLength="85.4" clip-path="url(#b [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1459.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-59)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1459.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-59)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1459.6" textLength="61" clip-path="url(#breeze-static-checks-line-59)">-only</text><text class="breeze-static-checks-r5" x="97.6" y="1459.6" textLength="134.2" clip-path="url(# [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1484" textLength="12.2" clip-path="url(#breeze-static-checks-line-60)">│</text><text class="breeze-static-checks-r2" x="305" y="1484" textLength="1134.6" clip-path="url(#breeze-static-checks-line-60)">branch&#160;and&#160;HEAD&#160;of&#160;your&#160;branch.&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;&#160;& [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1508.4" textLength="1464" clip-path="url(#breeze-static-checks-line-61)">╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯</text><text class="breeze-static-checks-r2" x="1464" y="1508.4" textLength="12.2" clip-path="url(#breeze-static-checks-line-61)">
+</text><text class="breeze-static-checks-r4" x="0" y="1532.8" textLength="24.4" clip-path="url(#breeze-static-checks-line-62)">╭─</text><text class="breeze-static-checks-r4" x="24.4" y="1532.8" textLength="463.6" clip-path="url(#breeze-static-checks-line-62)">&#160;Building&#160;image&#160;before&#160;running&#160;checks&#160;</text><text class="breeze-static-checks-r4" x="488" y="1532.8" textLength="951.6" clip-path="url(#breeze-static-checks-line-62)">────────────────────────────────── [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1557.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-63)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1557.2" textLength="12.2" clip-path="url(#breeze-static-checks-line-63)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1557.2" textLength="61" clip-path="url(#breeze-static-checks-line-63)">-skip</text><text class="breeze-static-checks-r5" x="97.6" y="1557.2" textLength="146.4" clip-path="url(# [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1581.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-64)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1581.6" textLength="12.2" clip-path="url(#breeze-static-checks-line-64)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1581.6" textLength="73.2" clip-path="url(#breeze-static-checks-line-64)">-force</text><text class="breeze-static-checks-r5" x="109.8" y="1581.6" textLength="73.2" clip-path="ur [...]
+</text><text class="breeze-static-checks-r4" x="0" y="1606" textLength="12.2" clip-path="url(#breeze-static-checks-line-65)">│</text><text class="breeze-static-checks-r5" x="24.4" y="1606" textLength="12.2" clip-path="url(#breeze-static-checks-line-65)">-</text><text class="breeze-static-checks-r5" x="36.6" y="1606" textLength="73.2" clip-path="url(#breeze-static-checks-line-65)">-image</text><text class="breeze-static-checks-r5" x="109.8" y="1606" textLength="48.8" clip-path="url(#breez [...]
  [remainder of the updated `breeze static-checks` help screenshot (SVG): option rows for
  --github-repository and --builder, plus a "Common options" panel with --verbose, --dry-run
  and --help; the raw SVG markup is truncated in this plain-text rendering]
diff --git a/airflow/kubernetes/pod_launcher.py b/kubernetes_tests/conftest.py
similarity index 79%
rename from airflow/kubernetes/pod_launcher.py
rename to kubernetes_tests/conftest.py
index 25a97921c3..3c861d00ba 100644
--- a/airflow/kubernetes/pod_launcher.py
+++ b/kubernetes_tests/conftest.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,11 +14,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module is deprecated.
-
-Please use :mod:`kubernetes.client.models` for V1ResourceRequirements and Port.
-"""
 from __future__ import annotations
 
-from airflow.kubernetes.pod_launcher_deprecated import PodLauncher, PodStatus  # noqa: autoflake
+import pytest
+
+
+@pytest.fixture(autouse=True)
+def initialize_providers_manager():
+    from airflow.providers_manager import ProvidersManager
+
+    ProvidersManager().initialize_providers_configuration()
diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py
index 2b66a3082b..d4a621055e 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator.py
@@ -691,7 +691,7 @@ class TestKubernetesPodOperatorSystem:
 
     def test_pod_template_file_system(self, mock_get_connection):
         """Note: this test requires that you have a namespace ``mem-example`` in your cluster."""
-        fixture = sys.path[0] + "/tests/kubernetes/basic_pod.yaml"
+        fixture = sys.path[0] + "/tests/providers/cncf/kubernetes/basic_pod.yaml"
         k = KubernetesPodOperator(
             task_id=str(uuid4()),
             in_cluster=False,
@@ -713,7 +713,7 @@ class TestKubernetesPodOperatorSystem:
         ],
     )
     def test_pod_template_file_with_overrides_system(self, env_vars, test_label, mock_get_connection):
-        fixture = sys.path[0] + "/tests/kubernetes/basic_pod.yaml"
+        fixture = sys.path[0] + "/tests/providers/cncf/kubernetes/basic_pod.yaml"
         k = KubernetesPodOperator(
             task_id=str(uuid4()),
             labels=self.labels,
@@ -740,7 +740,7 @@ class TestKubernetesPodOperatorSystem:
         assert result == {"hello": "world"}
 
     def test_pod_template_file_with_full_pod_spec(self, test_label, mock_get_connection):
-        fixture = sys.path[0] + "/tests/kubernetes/basic_pod.yaml"
+        fixture = sys.path[0] + "/tests/providers/cncf/kubernetes/basic_pod.yaml"
         pod_spec = k8s.V1Pod(
             metadata=k8s.V1ObjectMeta(
                 labels={"test_label": test_label, "fizz": "buzz"},
@@ -898,7 +898,7 @@ class TestKubernetesPodOperatorSystem:
         hook_mock.return_value.is_in_cluster = False
         hook_mock.return_value.get_connection.return_value = Connection(conn_id="kubernetes_default")
         extract_xcom_mock.return_value = "{}"
-        path = sys.path[0] + "/tests/kubernetes/pod.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod.yaml"
         k = KubernetesPodOperator(
             task_id=str(uuid4()),
             labels=self.labels,
diff --git a/newsfragments/32767.significant.rst b/newsfragments/32767.significant.rst
new file mode 100644
index 0000000000..ec4420488a
--- /dev/null
+++ b/newsfragments/32767.significant.rst
@@ -0,0 +1,7 @@
+The Kubernetes, Celery, CeleryKubernetes, LocalKubernetes, and Dask executors have been moved to their corresponding providers.
+
+In order to use the executors, you need to install the providers:
+
+* for Celery executors you need to install the ``apache-airflow-providers-celery`` package in version >= 3.3.0
+* for Kubernetes executors you need to install the ``apache-airflow-providers-cncf-kubernetes`` package in version >= 7.4.0
+* for Dask executors you need to install the ``apache-airflow-providers-daskexecutor`` package in any version
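
For orientation, a minimal sketch (not part of the commit) of where the moved executor classes can be imported from once the provider packages above are installed; all four import paths below also appear in the test changes later in this diff, while the Dask executor path is not shown there and is therefore omitted:

    # Sketch only: import locations after installing the provider packages listed above.
    from airflow.providers.celery.executors.celery_executor import CeleryExecutor
    from airflow.providers.celery.executors.celery_kubernetes_executor import CeleryKubernetesExecutor
    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
    from airflow.providers.cncf.kubernetes.executors.local_kubernetes_executor import LocalKubernetesExecutor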
diff --git a/scripts/ci/pre_commit/pre_commit_check_airflow_k8s_not_used.py b/scripts/ci/pre_commit/pre_commit_check_airflow_k8s_not_used.py
new file mode 100755
index 0000000000..e03c3009fd
--- /dev/null
+++ b/scripts/ci/pre_commit/pre_commit_check_airflow_k8s_not_used.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import ast
+import sys
+from typing import NamedTuple
+
+from rich.console import Console
+
+console = Console(color_system="standard", width=200)
+
+
+class ImportTuple(NamedTuple):
+    module: list[str]
+    name: list[str]
+    alias: str
+
+
+def get_imports(path: str):
+    with open(path) as fh:
+        root = ast.parse(fh.read(), path)
+
+    for node in ast.iter_child_nodes(root):
+        if isinstance(node, ast.Import):
+            module: list[str] = node.names[0].name.split(".") if node.names else []
+        elif isinstance(node, ast.ImportFrom) and node.module:
+            module = node.module.split(".")
+        else:
+            continue
+
+        for n in node.names:  # type: ignore[attr-defined]
+            yield ImportTuple(module=module, name=n.name.split("."), alias=n.asname)
+
+
+errors: list[str] = []
+
+
+def main() -> int:
+    for path in sys.argv[1:]:
+        import_count = 0
+        local_error_count = 0
+        for imp in get_imports(path):
+            import_count += 1
+            if len(imp.module) > 1:
+                if imp.module[:2] == ["airflow", "kubernetes"]:
+                    local_error_count += 1
+                    errors.append(f"{path}: ({'.'.join(imp.module)})")
+        console.print(f"[blue]{path}:[/] Import count: {import_count}, error_count {local_error_count}")
+    if errors:
+        console.print(
+            "[red]Some files imports from `airflow.kubernetes`.[/]\n"
+            "You should only import kubernetes code from `airflow.providers.cncf.kubernetes`."
+        )
+        console.print("Error summary:")
+        for error in errors:
+            console.print(error)
+        return 1
+    else:
+        console.print("[green]All good!")
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
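
For illustration, a minimal sketch of the kind of import this hook flags; the file name below is hypothetical, while both import paths are taken from elsewhere in this diff:

    # hypothetical_module.py -- a file passed to the hook as a command-line argument
    from airflow.kubernetes.pod_generator import PodGenerator  # flagged: module path starts with "airflow.kubernetes"

    # the replacement import the hook asks for:
    # from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator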
diff --git a/scripts/ci/pre_commit/pre_commit_check_cncf_k8s_used_for_k8s_executor_only.py b/scripts/ci/pre_commit/pre_commit_check_cncf_k8s_used_for_k8s_executor_only.py
new file mode 100755
index 0000000000..0117c07c0c
--- /dev/null
+++ b/scripts/ci/pre_commit/pre_commit_check_cncf_k8s_used_for_k8s_executor_only.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import ast
+import sys
+from typing import NamedTuple
+
+from rich.console import Console
+
+console = Console(color_system="standard", width=200)
+
+
+class ImportTuple(NamedTuple):
+    module: list[str]
+    name: list[str]
+    alias: str
+
+
+def get_imports(path: str):
+    with open(path) as fh:
+        root = ast.parse(fh.read(), path)
+
+    for node in ast.iter_child_nodes(root):
+        if isinstance(node, ast.Import):
+            module: list[str] = node.names[0].name.split(".") if node.names else []
+        elif isinstance(node, ast.ImportFrom) and node.module:
+            module = node.module.split(".")
+        else:
+            continue
+
+        for n in node.names:  # type: ignore[attr-defined]
+            yield ImportTuple(module=module, name=n.name.split("."), alias=n.asname)
+
+
+errors: list[str] = []
+
+EXCEPTIONS = ["airflow/cli/commands/kubernetes_command.py"]
+
+
+def main() -> int:
+    for path in sys.argv[1:]:
+        import_count = 0
+        local_error_count = 0
+        for imp in get_imports(path):
+            import_count += 1
+            if len(imp.module) > 3:
+                if imp.module[:4] == ["airflow", "providers", "cncf", "kubernetes"]:
+                    if path not in EXCEPTIONS:
+                        local_error_count += 1
+                        errors.append(f"{path}: ({'.'.join(imp.module)})")
+        console.print(f"[blue]{path}:[/] Import count: {import_count}, error_count {local_error_count}")
+    if errors:
+        console.print(
+            "[red]Some files imports from `airflow.providers.cncf.kubernetes` and they are not allowed.[/]\n"
+            "Only few k8s executors exceptions are allowed to use `airflow.providers.cncf.kubernetes`."
+        )
+        console.print("Error summary:")
+        for error in errors:
+            console.print(error)
+        return 1
+    else:
+        console.print("[green]All good!")
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
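
Similarly, a short sketch of how the EXCEPTIONS list in this reverse check works; the import path below is one used elsewhere in this diff:

    # airflow/cli/commands/kubernetes_command.py is listed in EXCEPTIONS, so a provider import
    # such as the one below is tolerated there; the same import in any other file checked by
    # this hook would be added to the error summary.
    from airflow.providers.cncf.kubernetes.kube_client import get_kube_client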
diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py
index b4c83b2d1b..e2caeea93b 100755
--- a/scripts/in_container/run_provider_yaml_files_check.py
+++ b/scripts/in_container/run_provider_yaml_files_check.py
@@ -37,6 +37,7 @@ from rich.console import Console
 from tabulate import tabulate
 
 from airflow.cli.commands.info_command import Architecture
+from airflow.providers_manager import ProvidersManager
 
 # Those are deprecated modules that contain removed Hooks/Sensors/Operators that we left in the code
 # so that users can get a very specific error message when they try to use them.
@@ -489,6 +490,7 @@ def check_providers_have_all_documentation_files(yaml_files: dict[str, dict]):
 
 
 if __name__ == "__main__":
+    ProvidersManager().initialize_providers_configuration()
     architecture = Architecture.get_current()
     console.print(f"Verifying packages on {architecture} architecture. Platform: {platform.machine()}.")
     provider_files_pattern = pathlib.Path(ROOT_DIR).glob("airflow/providers/**/provider.yaml")
diff --git a/setup.py b/setup.py
index 837ef85513..e6ef7ab871 100644
--- a/setup.py
+++ b/setup.py
@@ -730,9 +730,6 @@ EXTRAS_DEPENDENCIES = sort_extras_dependencies()
 # Those providers do not have dependency on airflow2.0 because that would lead to circular dependencies.
 # This is not a problem for PIP but some tools (pipdeptree) show those as a warning.
 PREINSTALLED_PROVIDERS = [
-    # TODO: When we release 3.3.0 version of celery provider we should change it to "celery>=3.3.0" here
-    #       In order to make sure executors are available in the celery provider
-    "celery",
     "common.sql",
     "ftp",
     "http",
diff --git a/tests/cli/commands/test_kubernetes_command.py b/tests/cli/commands/test_kubernetes_command.py
index eb75f56533..1f76220f52 100644
--- a/tests/cli/commands/test_kubernetes_command.py
+++ b/tests/cli/commands/test_kubernetes_command.py
@@ -64,7 +64,7 @@ class TestCleanUpPodsCommand:
         cls.parser = cli_parser.get_parser()
 
     @mock.patch("kubernetes.client.CoreV1Api.delete_namespaced_pod")
-    @mock.patch("airflow.kubernetes.kube_client.config.load_incluster_config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config.load_incluster_config")
     def test_delete_pod(self, load_incluster_config, delete_namespaced_pod):
         kubernetes_command._delete_pod("dummy", "awesome-namespace")
         delete_namespaced_pod.assert_called_with(body=mock.ANY, name="dummy", namespace="awesome-namespace")
@@ -72,7 +72,7 @@ class TestCleanUpPodsCommand:
 
     @mock.patch("airflow.cli.commands.kubernetes_command._delete_pod")
     @mock.patch("kubernetes.client.CoreV1Api.list_namespaced_pod")
-    @mock.patch("airflow.kubernetes.kube_client.config.load_incluster_config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config.load_incluster_config")
     def test_running_pods_are_not_cleaned(self, load_incluster_config, list_namespaced_pod, delete_pod):
         pod1 = MagicMock()
         pod1.metadata.name = "dummy"
@@ -94,7 +94,7 @@ class TestCleanUpPodsCommand:
 
     @mock.patch("airflow.cli.commands.kubernetes_command._delete_pod")
     @mock.patch("kubernetes.client.CoreV1Api.list_namespaced_pod")
-    @mock.patch("airflow.kubernetes.kube_client.config.load_incluster_config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config.load_incluster_config")
     def test_cleanup_succeeded_pods(self, load_incluster_config, list_namespaced_pod, delete_pod):
         pod1 = MagicMock()
         pod1.metadata.name = "dummy"
diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py
index 74a7dcea0d..a96c5cbddf 100644
--- a/tests/cli/commands/test_task_command.py
+++ b/tests/cli/commands/test_task_command.py
@@ -23,6 +23,7 @@ import logging
 import os
 import re
 import shutil
+import sys
 import tempfile
 import unittest
 from argparse import ArgumentParser
@@ -48,6 +49,7 @@ from airflow.utils import timezone
 from airflow.utils.session import create_session
 from airflow.utils.state import State
 from airflow.utils.types import DagRunType
+from setup import AIRFLOW_SOURCES_ROOT
 from tests.test_utils.config import conf_vars
 from tests.test_utils.db import clear_db_pools, clear_db_runs
 
@@ -747,9 +749,11 @@ class TestLogsfromTaskRunCommand:
         """
         import subprocess
 
-        with mock.patch.dict("os.environ", AIRFLOW_IS_K8S_EXECUTOR_POD=is_k8s):
+        with mock.patch.dict(
+            "os.environ", AIRFLOW_IS_K8S_EXECUTOR_POD=is_k8s, PYTHONPATH=os.fspath(AIRFLOW_SOURCES_ROOT)
+        ):
             with subprocess.Popen(
-                args=["airflow", *self.task_args, "-S", self.dag_path],
+                args=[sys.executable, "-m", "airflow", *self.task_args, "-S", self.dag_path],
                 stdout=subprocess.PIPE,
                 stderr=subprocess.PIPE,
             ) as process:
diff --git a/tests/models/test_renderedtifields.py b/tests/models/test_renderedtifields.py
index fb13f7a885..7a312a4c67 100644
--- a/tests/models/test_renderedtifields.py
+++ b/tests/models/test_renderedtifields.py
@@ -24,14 +24,11 @@ from datetime import date, timedelta
 from unittest import mock
 
 import pytest
-from sqlalchemy.orm.session import make_transient
 
 from airflow import settings
-from airflow.configuration import TEST_DAGS_FOLDER
 from airflow.models import Variable
 from airflow.models.renderedtifields import RenderedTaskInstanceFields as RTIF
 from airflow.operators.bash import BashOperator
-from airflow.utils.session import create_session
 from airflow.utils.timezone import datetime
 from tests.test_utils.asserts import assert_queries_count
 from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_rendered_ti_fields
@@ -302,51 +299,6 @@ class TestRenderedTaskInstanceFields:
             {"bash_command": "echo test_val_updated", "env": None},
         ) == result_updated
 
-    @mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
-    @mock.patch("airflow.utils.log.secrets_masker.redact", autospec=True, side_effect=lambda d, _=None: d)
-    def test_get_k8s_pod_yaml(self, redact, dag_maker):
-        """
-        Test that k8s_pod_yaml is rendered correctly, stored in the Database,
-        and are correctly fetched using RTIF.get_k8s_pod_yaml
-        """
-        with dag_maker("test_get_k8s_pod_yaml") as dag:
-            task = BashOperator(task_id="test", bash_command="echo hi")
-        dr = dag_maker.create_dagrun()
-        dag.fileloc = TEST_DAGS_FOLDER + "/test_get_k8s_pod_yaml.py"
-
-        ti = dr.task_instances[0]
-        ti.task = task
-
-        render_k8s_pod_yaml = mock.patch.object(
-            ti, "render_k8s_pod_yaml", return_value={"I'm a": "pod"}
-        ).start()
-
-        rtif = RTIF(ti=ti)
-
-        assert ti.dag_id == rtif.dag_id
-        assert ti.task_id == rtif.task_id
-        assert ti.run_id == rtif.run_id
-
-        expected_pod_yaml = {"I'm a": "pod"}
-
-        assert rtif.k8s_pod_yaml == render_k8s_pod_yaml.return_value
-        # K8s pod spec dict was passed to redact
-        redact.assert_any_call(rtif.k8s_pod_yaml)
-
-        with create_session() as session:
-            session.add(rtif)
-            session.flush()
-
-            assert expected_pod_yaml == RTIF.get_k8s_pod_yaml(ti=ti, session=session)
-            make_transient(ti)
-            # "Delete" it from the DB
-            session.rollback()
-
-            # Test the else part of get_k8s_pod_yaml
-            # i.e. for the TIs that are not stored in RTIF table
-            # Fetching them will return None
-            assert RTIF.get_k8s_pod_yaml(ti=ti, session=session) is None
-
     @mock.patch.dict(os.environ, {"AIRFLOW_VAR_API_KEY": "secret"})
     @mock.patch("airflow.utils.log.secrets_masker.redact", autospec=True)
     def test_redact(self, redact, dag_maker):
diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py
index 48e1509057..9e7f5eb48a 100644
--- a/tests/models/test_taskinstance.py
+++ b/tests/models/test_taskinstance.py
@@ -86,7 +86,6 @@ from airflow.utils.state import DagRunState, State, TaskInstanceState
 from airflow.utils.task_group import TaskGroup
 from airflow.utils.types import DagRunType
 from airflow.utils.xcom import XCOM_RETURN_KEY
-from airflow.version import version
 from tests.models import DEFAULT_DATE, TEST_DAGS_FOLDER
 from tests.test_utils import db
 from tests.test_utils.config import conf_vars
@@ -3000,86 +2999,6 @@ class TestTaskInstance:
         with create_session() as session:
             session.query(RenderedTaskInstanceFields).delete()
 
-    @mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
-    @mock.patch("airflow.settings.pod_mutation_hook")
-    def test_render_k8s_pod_yaml(self, pod_mutation_hook, create_task_instance):
-        ti = create_task_instance(
-            dag_id="test_render_k8s_pod_yaml",
-            run_id="test_run_id",
-            task_id="op1",
-            execution_date=DEFAULT_DATE,
-        )
-
-        expected_pod_spec = {
-            "metadata": {
-                "annotations": {
-                    "dag_id": "test_render_k8s_pod_yaml",
-                    "run_id": "test_run_id",
-                    "task_id": "op1",
-                    "try_number": "1",
-                },
-                "labels": {
-                    "airflow-worker": "0",
-                    "airflow_version": version,
-                    "dag_id": "test_render_k8s_pod_yaml",
-                    "run_id": "test_run_id",
-                    "kubernetes_executor": "True",
-                    "task_id": "op1",
-                    "try_number": "1",
-                },
-                "name": mock.ANY,
-                "namespace": "default",
-            },
-            "spec": {
-                "containers": [
-                    {
-                        "args": [
-                            "airflow",
-                            "tasks",
-                            "run",
-                            "test_render_k8s_pod_yaml",
-                            "op1",
-                            "test_run_id",
-                            "--subdir",
-                            __file__,
-                        ],
-                        "name": "base",
-                        "env": [{"name": "AIRFLOW_IS_K8S_EXECUTOR_POD", "value": "True"}],
-                    }
-                ]
-            },
-        }
-
-        assert ti.render_k8s_pod_yaml() == expected_pod_spec
-        pod_mutation_hook.assert_called_once_with(mock.ANY)
-
-    @mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
-    @mock.patch.object(RenderedTaskInstanceFields, "get_k8s_pod_yaml")
-    def test_get_rendered_k8s_spec(self, rtif_get_k8s_pod_yaml, create_task_instance):
-        # Create new TI for the same Task
-        ti = create_task_instance()
-
-        patcher = mock.patch.object(ti, "render_k8s_pod_yaml", autospec=True)
-
-        fake_spec = {"ermagawds": "pods"}
-
-        session = mock.Mock()
-
-        with patcher as render_k8s_pod_yaml:
-            rtif_get_k8s_pod_yaml.return_value = fake_spec
-            assert ti.get_rendered_k8s_spec(session) == fake_spec
-
-            rtif_get_k8s_pod_yaml.assert_called_once_with(ti, session=session)
-            render_k8s_pod_yaml.assert_not_called()
-
-            # Now test that when we _dont_ find it in the DB, it calls render_k8s_pod_yaml
-            rtif_get_k8s_pod_yaml.return_value = None
-            render_k8s_pod_yaml.return_value = fake_spec
-
-            assert ti.get_rendered_k8s_spec(session) == fake_spec
-
-            render_k8s_pod_yaml.assert_called_once()
-
     def test_set_state_up_for_retry(self, create_task_instance):
         ti = create_task_instance(state=State.RUNNING)
 
diff --git a/tests/providers/apache/spark/hooks/test_spark_submit.py b/tests/providers/apache/spark/hooks/test_spark_submit.py
index 052b15aeb2..d6b2a98509 100644
--- a/tests/providers/apache/spark/hooks/test_spark_submit.py
+++ b/tests/providers/apache/spark/hooks/test_spark_submit.py
@@ -773,7 +773,7 @@ class TestSparkSubmitHook:
         assert kill_cmd[3] == "--kill"
         assert kill_cmd[4] == "driver-20171128111415-0001"
 
-    @patch("airflow.kubernetes.kube_client.get_kube_client")
+    @patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     @patch("airflow.providers.apache.spark.hooks.spark_submit.subprocess.Popen")
     def test_k8s_process_on_kill(self, mock_popen, mock_client_method):
         # Given
diff --git a/tests/providers/celery/executors/test_celery_kubernetes_executor.py b/tests/providers/celery/executors/test_celery_kubernetes_executor.py
index 1ef45ab22d..1950b93690 100644
--- a/tests/providers/celery/executors/test_celery_kubernetes_executor.py
+++ b/tests/providers/celery/executors/test_celery_kubernetes_executor.py
@@ -23,9 +23,9 @@ import pytest
 
 from airflow.callbacks.callback_requests import CallbackRequest
 from airflow.configuration import conf
-from airflow.executors.kubernetes_executor import KubernetesExecutor
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
 from airflow.providers.celery.executors.celery_kubernetes_executor import CeleryKubernetesExecutor
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
 
 KUBERNETES_QUEUE = "kubernetes"
 
diff --git a/tests/providers/cncf/kubernetes/__init__.py b/tests/providers/cncf/kubernetes/__init__.py
index 13a83393a9..217e5db960 100644
--- a/tests/providers/cncf/kubernetes/__init__.py
+++ b/tests/providers/cncf/kubernetes/__init__.py
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/tests/kubernetes/basic_pod.yaml b/tests/providers/cncf/kubernetes/basic_pod.yaml
similarity index 100%
rename from tests/kubernetes/basic_pod.yaml
rename to tests/providers/cncf/kubernetes/basic_pod.yaml
diff --git a/airflow/kubernetes/__init__.py b/tests/providers/cncf/kubernetes/executors/__init__.py
similarity index 100%
copy from airflow/kubernetes/__init__.py
copy to tests/providers/cncf/kubernetes/executors/__init__.py
diff --git a/airflow/kubernetes/__init__.py b/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
similarity index 100%
copy from airflow/kubernetes/__init__.py
copy to tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/__init__.py
diff --git a/tests/executors/kubernetes_executor_template_files/basic_template.yaml b/tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/basic_template.yaml
similarity index 100%
rename from tests/executors/kubernetes_executor_template_files/basic_template.yaml
rename to tests/providers/cncf/kubernetes/executors/kubernetes_executor_template_files/basic_template.yaml
diff --git a/tests/executors/test_kubernetes_executor.py b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
similarity index 89%
rename from tests/executors/test_kubernetes_executor.py
rename to tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
index 4b4685e079..0b9721f117 100644
--- a/tests/executors/test_kubernetes_executor.py
+++ b/tests/providers/cncf/kubernetes/executors/test_kubernetes_executor.py
@@ -31,7 +31,6 @@ from kubernetes.client.rest import ApiException
 from urllib3 import HTTPResponse
 
 from airflow import AirflowException
-from airflow.exceptions import PodReconciliationError
 from airflow.models.taskinstancekey import TaskInstanceKey
 from airflow.operators.bash import BashOperator
 from airflow.operators.empty import EmptyOperator
@@ -40,22 +39,25 @@ from airflow.utils.state import State, TaskInstanceState
 from tests.test_utils.config import conf_vars
 
 try:
-    from airflow.executors.kubernetes_executor import KubernetesExecutor
-    from airflow.executors.kubernetes_executor_types import POD_EXECUTOR_DONE_KEY
-    from airflow.executors.kubernetes_executor_utils import (
+    from airflow.providers.cncf.kubernetes import pod_generator
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import (
+        KubernetesExecutor,
+        PodReconciliationError,
+    )
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_types import POD_EXECUTOR_DONE_KEY
+    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils import (
         AirflowKubernetesScheduler,
         KubernetesJobWatcher,
         ResourceVersion,
         create_pod_id,
         get_base_pod_from_template,
     )
-    from airflow.kubernetes import pod_generator
-    from airflow.kubernetes.kubernetes_helper_functions import (
+    from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import (
         annotations_for_logging_task_metadata,
         annotations_to_key,
         get_logs_task_metadata,
     )
-    from airflow.kubernetes.pod_generator import PodGenerator
+    from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 except ImportError:
     AirflowKubernetesScheduler = None  # type: ignore
 
@@ -108,8 +110,8 @@ class TestAirflowKubernetesScheduler:
     @pytest.mark.skipif(
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
-    @mock.patch("airflow.kubernetes.pod_generator.PodGenerator")
-    @mock.patch("airflow.executors.kubernetes_executor.KubeConfig")
+    @mock.patch("airflow.providers.cncf.kubernetes.pod_generator.PodGenerator")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubeConfig")
     def test_get_base_pod_from_template(self, mock_kubeconfig, mock_generator):
         # Provide non-existent file path,
         # so None will be passed to deserialize_model_dict().
@@ -125,7 +127,7 @@ class TestAirflowKubernetesScheduler:
 
         # Provide existent file path,
         # so loaded YAML file content should be used to call deserialize_model_dict(), rather than None.
-        path = sys.path[0] + "/tests/kubernetes/pod.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod.yaml"
         with open(path) as stream:
             expected_pod_dict = yaml.safe_load(stream)
 
@@ -160,9 +162,9 @@ class TestAirflowKubernetesScheduler:
     @pytest.mark.skipif(
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
     def test_delete_pod_successfully(self, mock_watcher, mock_client, mock_kube_client):
         pod_name = "my-pod-1"
         namespace = "my-namespace-1"
@@ -182,9 +184,9 @@ class TestAirflowKubernetesScheduler:
     @pytest.mark.skipif(
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
     def test_delete_pod_raises_404(self, mock_watcher, mock_client, mock_kube_client):
         pod_name = "my-pod-1"
         namespace = "my-namespace-2"
@@ -205,9 +207,9 @@ class TestAirflowKubernetesScheduler:
     @pytest.mark.skipif(
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
     def test_delete_pod_404_not_raised(self, mock_watcher, mock_client, mock_kube_client):
         pod_name = "my-pod-1"
         namespace = "my-namespace-3"
@@ -249,8 +251,8 @@ class TestKubernetesExecutor:
             pytest.param(400, False, id="400 BadRequest"),
         ],
     )
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_run_next_exception_requeue(
         self, mock_get_kube_client, mock_kubernetes_job_watcher, status, should_requeue
     ):
@@ -268,7 +270,7 @@ class TestKubernetesExecutor:
         - 400 BadRequest is returned when your parameters are invalid e.g. asking for cpu=100ABC123.
 
         """
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
 
         response = HTTPResponse(body='{"message": "any message"}', status=status)
 
@@ -319,7 +321,7 @@ class TestKubernetesExecutor:
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
     @mock.patch("airflow.settings.pod_mutation_hook")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_run_next_pmh_error(self, mock_get_kube_client, mock_pmh):
         """
         Exception during Pod Mutation Hook execution should be handled gracefully.
@@ -357,13 +359,13 @@ class TestKubernetesExecutor:
     @pytest.mark.skipif(
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_run_next_pod_reconciliation_error(self, mock_get_kube_client, mock_kubernetes_job_watcher):
         """
         When construct_pod raises PodReconciliationError, we should fail the task.
         """
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
 
         mock_kube_client = mock.patch("kubernetes.client.CoreV1Api", autospec=True)
         fail_msg = "test message"
@@ -393,8 +395,8 @@ class TestKubernetesExecutor:
             finally:
                 kubernetes_executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor.KubeConfig")
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor.sync")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubeConfig")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor.sync")
     @mock.patch("airflow.executors.base_executor.BaseExecutor.trigger_tasks")
     @mock.patch("airflow.executors.base_executor.Stats.gauge")
     def test_gauge_executor_metrics(self, mock_stats_gauge, mock_trigger_tasks, mock_sync, mock_kube_config):
@@ -417,8 +419,8 @@ class TestKubernetesExecutor:
         ]
         mock_stats_gauge.assert_has_calls(calls)
 
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_invalid_executor_config(self, mock_get_kube_client, mock_kubernetes_job_watcher):
         executor = self.kubernetes_executor
         executor.start()
@@ -443,8 +445,10 @@ class TestKubernetesExecutor:
     @pytest.mark.skipif(
         AirflowKubernetesScheduler is None, reason="kubernetes python package is not installed"
     )
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.run_pod_async")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.run_pod_async"
+    )
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_pod_template_file_override_in_executor_config(self, mock_get_kube_client, mock_run_pod_async):
         current_folder = pathlib.Path(__file__).parent.resolve()
         template_file = str(
@@ -528,8 +532,8 @@ class TestKubernetesExecutor:
             finally:
                 executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_change_state_running(self, mock_get_kube_client, mock_kubernetes_job_watcher):
         executor = self.kubernetes_executor
         executor.start()
@@ -542,9 +546,11 @@ class TestKubernetesExecutor:
         finally:
             executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod"
+    )
     def test_change_state_success(self, mock_delete_pod, mock_get_kube_client, mock_kubernetes_job_watcher):
         executor = self.kubernetes_executor
         executor.start()
@@ -558,9 +564,11 @@ class TestKubernetesExecutor:
         finally:
             executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler"
+    )
     def test_change_state_failed_no_deletion(
         self, mock_kubescheduler, mock_get_kube_client, mock_kubernetes_job_watcher
     ):
@@ -584,9 +592,11 @@ class TestKubernetesExecutor:
     @pytest.mark.parametrize(
         "ti_state", [TaskInstanceState.SUCCESS, TaskInstanceState.FAILED, TaskInstanceState.DEFERRED]
     )
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod"
+    )
     def test_change_state_none(
         self,
         mock_delete_pod,
@@ -616,7 +626,7 @@ class TestKubernetesExecutor:
             pytest.param(None, ["ALL_NAMESPACES"]),
         ],
     )
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_watchers_under_multi_namespace_mode(
         self, mock_get_kube_client, multi_namespace_mode_namespace_list, watchers_keys
     ):
@@ -632,9 +642,11 @@ class TestKubernetesExecutor:
         finally:
             executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler"
+    )
     def test_change_state_skip_pod_deletion(
         self, mock_kubescheduler, mock_get_kube_client, mock_kubernetes_job_watcher
     ):
@@ -656,9 +668,11 @@ class TestKubernetesExecutor:
         finally:
             executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor_utils.KubernetesJobWatcher")
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler")
+    @mock.patch("airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.KubernetesJobWatcher")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler"
+    )
     def test_change_state_failed_pod_deletion(
         self, mock_kubescheduler, mock_get_kube_client, mock_kubernetes_job_watcher
     ):
@@ -679,8 +693,12 @@ class TestKubernetesExecutor:
         finally:
             executor.end()
 
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor.adopt_launched_task")
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor.adopt_launched_task"
+    )
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods"
+    )
     def test_try_adopt_task_instances(self, mock_adopt_completed_pods, mock_adopt_launched_task):
         executor = self.kubernetes_executor
         executor.scheduler_job_id = "10"
@@ -731,7 +749,9 @@ class TestKubernetesExecutor:
         mock_adopt_completed_pods.assert_called_once()
         assert reset_tis == []  # This time our return is empty - no TIs to reset
 
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods"
+    )
     def test_try_adopt_task_instances_multiple_scheduler_ids(self, mock_adopt_completed_pods):
         """We try to find pods only once per scheduler id"""
         executor = self.kubernetes_executor
@@ -762,8 +782,12 @@ class TestKubernetesExecutor:
             any_order=True,
         )
 
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor.adopt_launched_task")
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor.adopt_launched_task"
+    )
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor._adopt_completed_pods"
+    )
     def test_try_adopt_task_instances_no_matching_pods(
         self, mock_adopt_completed_pods, mock_adopt_launched_task
     ):
@@ -778,7 +802,7 @@ class TestKubernetesExecutor:
         mock_adopt_launched_task.assert_not_called()
         mock_adopt_completed_pods.assert_called_once()
 
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_adopt_launched_task(self, mock_kube_client):
         executor = self.kubernetes_executor
         executor.scheduler_job_id = "modified"
@@ -803,7 +827,7 @@ class TestKubernetesExecutor:
         assert tis_to_flush_by_key == {}
         assert executor.running == {ti_key}
 
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_adopt_launched_task_api_exception(self, mock_kube_client):
         """We shouldn't think we are running the task if aren't able to patch the pod"""
         executor = self.kubernetes_executor
@@ -828,7 +852,7 @@ class TestKubernetesExecutor:
         assert tis_to_flush_by_key == {ti_key: {}}
         assert executor.running == set()
 
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_adopt_completed_pods(self, mock_kube_client):
         """We should adopt all completed pods from other schedulers"""
         executor = self.kubernetes_executor
@@ -878,7 +902,7 @@ class TestKubernetesExecutor:
         )
         assert executor.running == expected_running_ti_keys
 
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_not_adopt_unassigned_task(self, mock_kube_client):
         """
         We should not adopt any tasks that were not assigned by the scheduler.
@@ -904,8 +928,10 @@ class TestKubernetesExecutor:
         assert not mock_kube_client.patch_namespaced_pod.called
         assert tis_to_flush_by_key == {"foobar": {}}
 
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
-    @mock.patch("airflow.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.AirflowKubernetesScheduler.delete_pod"
+    )
     def test_cleanup_stuck_queued_tasks(self, mock_delete_pod, mock_kube_client, dag_maker, session):
         """Delete any pods associated with a task stuck in queued."""
         executor = KubernetesExecutor()
@@ -1129,7 +1155,7 @@ class TestKubernetesExecutor:
         assert ti0.state == State.SCHEDULED
         assert ti1.state == State.QUEUED
 
-    @mock.patch("airflow.kubernetes.kube_client.get_kube_client")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_get_task_log(self, mock_get_kube_client, create_task_instance_of_operator):
         """fetch task log from pod"""
         mock_kube_client = mock_get_kube_client.return_value
@@ -1232,7 +1258,9 @@ class TestKubernetesJobWatcher:
         self.events = []
 
     def _run(self):
-        with mock.patch("airflow.executors.kubernetes_executor_utils.watch") as mock_watch:
+        with mock.patch(
+            "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.watch"
+        ) as mock_watch:
             mock_watch.Watch.return_value.stream.return_value = self.events
             latest_resource_version = self.watcher._run(
                 self.kube_client,
@@ -1362,7 +1390,9 @@ class TestKubernetesJobWatcher:
 
         self.watcher._run = mock_underscore_run
 
-        with mock.patch("airflow.executors.kubernetes_executor_utils.get_kube_client"):
+        with mock.patch(
+            "airflow.providers.cncf.kubernetes.executors.kubernetes_executor_utils.get_kube_client"
+        ):
             try:
                 # self.watcher._run() is mocked and return "500" as last resource_version
                 self.watcher.run()
diff --git a/tests/executors/test_local_kubernetes_executor.py b/tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py
similarity index 97%
rename from tests/executors/test_local_kubernetes_executor.py
rename to tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py
index e4d2fdd371..ca40c95e45 100644
--- a/tests/executors/test_local_kubernetes_executor.py
+++ b/tests/providers/cncf/kubernetes/executors/test_local_kubernetes_executor.py
@@ -22,7 +22,9 @@ from unittest import mock
 from airflow.callbacks.callback_requests import CallbackRequest
 from airflow.configuration import conf
 from airflow.executors.local_executor import LocalExecutor
-from airflow.executors.local_kubernetes_executor import LocalKubernetesExecutor
+from airflow.providers.cncf.kubernetes.executors.local_kubernetes_executor import (
+    LocalKubernetesExecutor,
+)
 
 
 class TestLocalKubernetesExecutor:
diff --git a/tests/kubernetes/kube_config b/tests/providers/cncf/kubernetes/kube_config
similarity index 100%
rename from tests/kubernetes/kube_config
rename to tests/providers/cncf/kubernetes/kube_config
diff --git a/tests/kubernetes/models/__init__.py b/tests/providers/cncf/kubernetes/models/__init__.py
similarity index 100%
rename from tests/kubernetes/models/__init__.py
rename to tests/providers/cncf/kubernetes/models/__init__.py
diff --git a/tests/kubernetes/models/test_secret.py b/tests/providers/cncf/kubernetes/models/test_secret.py
similarity index 93%
rename from tests/kubernetes/models/test_secret.py
rename to tests/providers/cncf/kubernetes/models/test_secret.py
index df7e4b7817..d6b8a38c2d 100644
--- a/tests/kubernetes/models/test_secret.py
+++ b/tests/providers/cncf/kubernetes/models/test_secret.py
@@ -22,9 +22,9 @@ from unittest import mock
 
 from kubernetes.client import ApiClient, models as k8s
 
-from airflow.kubernetes.k8s_model import append_to_pod
-from airflow.kubernetes.pod_generator import PodGenerator
-from airflow.kubernetes.secret import Secret
+from airflow.providers.cncf.kubernetes.k8s_model import append_to_pod
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
+from airflow.providers.cncf.kubernetes.secret import Secret
 
 
 class TestSecret:
@@ -63,13 +63,13 @@ class TestSecret:
         )
 
     @mock.patch("uuid.uuid4")
-    @mock.patch("airflow.kubernetes.pod_generator.rand_str")
+    @mock.patch("airflow.providers.cncf.kubernetes.pod_generator.rand_str")
     def test_attach_to_pod(self, mock_rand_str, mock_uuid):
         static_uuid = uuid.UUID("cf4a56d2-8101-4217-b027-2af6216feb48")
         mock_uuid.return_value = static_uuid
         rand_str = "abcd1234"
         mock_rand_str.return_value = rand_str
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base.yaml"
         pod = PodGenerator(pod_template_file=path).ud_pod
         secrets = [
             # This should be a secretRef
diff --git a/tests/providers/cncf/kubernetes/operators/test_pod.py b/tests/providers/cncf/kubernetes/operators/test_pod.py
index 15ca6553cd..4383ae32f6 100644
--- a/tests/providers/cncf/kubernetes/operators/test_pod.py
+++ b/tests/providers/cncf/kubernetes/operators/test_pod.py
@@ -29,10 +29,10 @@ from urllib3 import HTTPResponse
 from urllib3.packages.six import BytesIO
 
 from airflow.exceptions import AirflowException, AirflowSkipException, TaskDeferred
-from airflow.kubernetes.secret import Secret
 from airflow.models import DAG, DagModel, DagRun, TaskInstance
 from airflow.models.xcom import XCom
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator, _optionally_suppress
+from airflow.providers.cncf.kubernetes.secret import Secret
 from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
 from airflow.providers.cncf.kubernetes.utils.pod_manager import PodPhase
 from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
diff --git a/tests/kubernetes/pod.yaml b/tests/providers/cncf/kubernetes/pod.yaml
similarity index 100%
rename from tests/kubernetes/pod.yaml
rename to tests/providers/cncf/kubernetes/pod.yaml
diff --git a/tests/kubernetes/pod_generator_base.yaml b/tests/providers/cncf/kubernetes/pod_generator_base.yaml
similarity index 100%
rename from tests/kubernetes/pod_generator_base.yaml
rename to tests/providers/cncf/kubernetes/pod_generator_base.yaml
diff --git a/tests/kubernetes/pod_generator_base_with_secrets.yaml b/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml
similarity index 100%
rename from tests/kubernetes/pod_generator_base_with_secrets.yaml
rename to tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml
diff --git a/tests/kubernetes/test_client.py b/tests/providers/cncf/kubernetes/test_client.py
similarity index 87%
rename from tests/kubernetes/test_client.py
rename to tests/providers/cncf/kubernetes/test_client.py
index 95a1a5419a..311c352e21 100644
--- a/tests/kubernetes/test_client.py
+++ b/tests/providers/cncf/kubernetes/test_client.py
@@ -22,25 +22,29 @@ from unittest import mock
 from kubernetes.client import Configuration
 from urllib3.connection import HTTPConnection, HTTPSConnection
 
-from airflow.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive, get_kube_client
+from airflow.providers.cncf.kubernetes.kube_client import (
+    _disable_verify_ssl,
+    _enable_tcp_keepalive,
+    get_kube_client,
+)
 from tests.test_utils.config import conf_vars
 
 
 class TestClient:
-    @mock.patch("airflow.kubernetes.kube_client.config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config")
     def test_load_cluster_config(self, config):
         get_kube_client(in_cluster=True)
         assert config.load_incluster_config.called
         assert config.load_kube_config.not_called
 
-    @mock.patch("airflow.kubernetes.kube_client.config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config")
     def test_load_file_config(self, config):
         get_kube_client(in_cluster=False)
         assert config.load_incluster_config.not_called
         assert config.load_kube_config.called
 
-    @mock.patch("airflow.kubernetes.kube_client.config")
-    @mock.patch("airflow.kubernetes.kube_client.conf")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.conf")
     def test_load_config_disable_ssl(self, conf, config):
         conf.getboolean.return_value = False
         conf.getjson.return_value = {"total": 3, "backoff_factor": 0.5}
@@ -48,8 +52,8 @@ class TestClient:
         conf.getboolean.assert_called_with("kubernetes_executor", "verify_ssl")
         assert not client.api_client.configuration.verify_ssl
 
-    @mock.patch("airflow.kubernetes.kube_client.config")
-    @mock.patch("airflow.kubernetes.kube_client.conf")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.config")
+    @mock.patch("airflow.providers.cncf.kubernetes.kube_client.conf")
     def test_load_config_ssl_ca_cert(self, conf, config):
         conf.get.return_value = "/path/to/ca.crt"
         conf.getjson.return_value = {"total": 3, "backoff_factor": 0.5}
diff --git a/tests/kubernetes/test_kubernetes_helper_functions.py b/tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py
similarity index 98%
rename from tests/kubernetes/test_kubernetes_helper_functions.py
rename to tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py
index 940ee804ef..d90ac92b6c 100644
--- a/tests/kubernetes/test_kubernetes_helper_functions.py
+++ b/tests/providers/cncf/kubernetes/test_kubernetes_helper_functions.py
@@ -22,7 +22,7 @@ import re
 import pytest
 from pytest import param
 
-from airflow.kubernetes.kubernetes_helper_functions import create_pod_id
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_pod_id
 from airflow.providers.cncf.kubernetes.operators.pod import _create_pod_id
 
 pod_name_regex = r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
diff --git a/tests/kubernetes/test_pod_generator.py b/tests/providers/cncf/kubernetes/test_pod_generator.py
similarity index 95%
rename from tests/kubernetes/test_pod_generator.py
rename to tests/providers/cncf/kubernetes/test_pod_generator.py
index 578e8e5edd..0945d87e4e 100644
--- a/tests/kubernetes/test_pod_generator.py
+++ b/tests/providers/cncf/kubernetes/test_pod_generator.py
@@ -29,15 +29,16 @@ from kubernetes.client import ApiClient, models as k8s
 from pytest import param
 
 from airflow import __version__
-from airflow.exceptions import AirflowConfigException, PodReconciliationError
-from airflow.kubernetes.pod_generator import (
+from airflow.exceptions import AirflowConfigException
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import PodReconciliationError
+from airflow.providers.cncf.kubernetes.pod_generator import (
     PodDefaults,
     PodGenerator,
     datetime_to_label_safe_datestring,
     extend_object_field,
     merge_objects,
 )
-from airflow.kubernetes.secret import Secret
+from airflow.providers.cncf.kubernetes.secret import Secret
 
 now = pendulum.now("UTC")
 
@@ -162,14 +163,14 @@ class TestPodGenerator:
             ),
         )
 
-    @mock.patch("airflow.kubernetes.kubernetes_helper_functions.rand_str")
+    @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
     def test_gen_pod_extract_xcom(self, mock_rand_str):
         """
         Method gen_pod is used nowhere in the codebase and is deprecated.
         This test is retained only for backward compatibility.
         """
         mock_rand_str.return_value = self.rand_str
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
 
         pod_generator = PodGenerator(pod_template_file=path, extract_xcom=True)
         result = pod_generator.gen_pod()
@@ -327,7 +328,7 @@ class TestPodGenerator:
         } == result
 
     def test_reconcile_pods_empty_mutator_pod(self):
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
         pod_generator = PodGenerator(pod_template_file=path, extract_xcom=True)
         base_pod = pod_generator.ud_pod
         mutator_pod = None
@@ -338,10 +339,10 @@ class TestPodGenerator:
         result = PodGenerator.reconcile_pods(base_pod, mutator_pod)
         assert base_pod == result
 
-    @mock.patch("airflow.kubernetes.kubernetes_helper_functions.rand_str")
+    @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
     def test_reconcile_pods(self, mock_rand_str):
         mock_rand_str.return_value = self.rand_str
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
 
         base_pod = PodGenerator(pod_template_file=path, extract_xcom=False).ud_pod
 
@@ -402,7 +403,7 @@ class TestPodGenerator:
         ],
     )
     def test_construct_pod(self, config_image, expected_image):
-        template_file = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        template_file = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
         worker_config = PodGenerator.deserialize_model_file(template_file)
         executor_config = k8s.V1Pod(
             spec=k8s.V1PodSpec(
@@ -448,7 +449,7 @@ class TestPodGenerator:
         assert expected_dict == result_dict
 
     def test_construct_pod_mapped_task(self):
-        template_file = sys.path[0] + "/tests/kubernetes/pod_generator_base.yaml"
+        template_file = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base.yaml"
         worker_config = PodGenerator.deserialize_model_file(template_file)
         result = PodGenerator.construct_pod(
             dag_id=self.dag_id,
@@ -482,7 +483,7 @@ class TestPodGenerator:
         assert result_dict == expected_dict
 
     def test_construct_pod_empty_executor_config(self):
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
         worker_config = PodGenerator.deserialize_model_file(path)
         executor_config = None
 
@@ -513,13 +514,13 @@ class TestPodGenerator:
         worker_config_result = self.k8s_client.sanitize_for_serialization(worker_config)
         assert sanitized_result == worker_config_result
 
-    @mock.patch("airflow.kubernetes.kubernetes_helper_functions.rand_str")
+    @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
     def test_construct_pod_attribute_error(self, mock_rand_str):
         """
         After upgrading the k8s library we might get an AttributeError.
         In this case it should raise PodReconciliationError.
         """
-        path = sys.path[0] + "/tests/kubernetes/pod_generator_base_with_secrets.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod_generator_base_with_secrets.yaml"
         worker_config = PodGenerator.deserialize_model_file(path)
         mock_rand_str.return_value = self.rand_str
         executor_config = MagicMock()
@@ -540,7 +541,7 @@ class TestPodGenerator:
                 scheduler_job_id="uuid",
             )
 
-    @mock.patch("airflow.kubernetes.kubernetes_helper_functions.rand_str")
+    @mock.patch("airflow.providers.cncf.kubernetes.kubernetes_helper_functions.rand_str")
     def test_ensure_max_identifier_length(self, mock_rand_str):
         mock_rand_str.return_value = self.rand_str
         path = os.path.join(os.path.dirname(__file__), "pod_generator_base_with_secrets.yaml")
@@ -703,14 +704,14 @@ class TestPodGenerator:
         assert res.init_containers == base_spec.init_containers + client_spec.init_containers
 
     def test_deserialize_model_file(self, caplog):
-        path = sys.path[0] + "/tests/kubernetes/pod.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/pod.yaml"
         result = PodGenerator.deserialize_model_file(path)
         sanitized_res = self.k8s_client.sanitize_for_serialization(result)
         assert sanitized_res == self.deserialize_result
         assert len(caplog.records) == 0
 
     def test_deserialize_non_existent_model_file(self, caplog):
-        path = sys.path[0] + "/tests/kubernetes/non_existent.yaml"
+        path = sys.path[0] + "/tests/providers/cncf/kubernetes/non_existent.yaml"
         result = PodGenerator.deserialize_model_file(path)
         sanitized_res = self.k8s_client.sanitize_for_serialization(result)
         assert sanitized_res == {}
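
Note on the import change at the top of this file: PodReconciliationError is now imported from the provider's kubernetes_executor module instead of airflow.exceptions. For code that has to run against both layouts, an illustrative compatibility sketch (not part of this commit) could look like:

    try:
        from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import (
            PodReconciliationError,
        )
    except ImportError:
        # Older environments without the updated provider still expose the
        # exception from core Airflow.
        from airflow.exceptions import PodReconciliationError
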
diff --git a/tests/providers/cncf/kubernetes/test_template_rendering.py b/tests/providers/cncf/kubernetes/test_template_rendering.py
new file mode 100644
index 0000000000..82a5d80102
--- /dev/null
+++ b/tests/providers/cncf/kubernetes/test_template_rendering.py
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from unittest import mock
+
+from sqlalchemy.orm import make_transient
+
+from airflow.configuration import TEST_DAGS_FOLDER
+from airflow.models.renderedtifields import RenderedTaskInstanceFields, RenderedTaskInstanceFields as RTIF
+from airflow.operators.bash import BashOperator
+from airflow.utils.session import create_session
+from airflow.version import version
+from tests.models import DEFAULT_DATE
+
+
+@mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
+@mock.patch("airflow.settings.pod_mutation_hook")
+def test_render_k8s_pod_yaml(pod_mutation_hook, create_task_instance):
+    ti = create_task_instance(
+        dag_id="test_render_k8s_pod_yaml",
+        run_id="test_run_id",
+        task_id="op1",
+        execution_date=DEFAULT_DATE,
+    )
+
+    expected_pod_spec = {
+        "metadata": {
+            "annotations": {
+                "dag_id": "test_render_k8s_pod_yaml",
+                "run_id": "test_run_id",
+                "task_id": "op1",
+                "try_number": "1",
+            },
+            "labels": {
+                "airflow-worker": "0",
+                "airflow_version": version,
+                "dag_id": "test_render_k8s_pod_yaml",
+                "run_id": "test_run_id",
+                "kubernetes_executor": "True",
+                "task_id": "op1",
+                "try_number": "1",
+            },
+            "name": mock.ANY,
+            "namespace": "default",
+        },
+        "spec": {
+            "containers": [
+                {
+                    "args": [
+                        "airflow",
+                        "tasks",
+                        "run",
+                        "test_render_k8s_pod_yaml",
+                        "op1",
+                        "test_run_id",
+                        "--subdir",
+                        __file__,
+                    ],
+                    "name": "base",
+                    "env": [{"name": "AIRFLOW_IS_K8S_EXECUTOR_POD", "value": "True"}],
+                }
+            ]
+        },
+    }
+
+    assert ti.render_k8s_pod_yaml() == expected_pod_spec
+    pod_mutation_hook.assert_called_once_with(mock.ANY)
+
+
+@mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
+@mock.patch.object(RenderedTaskInstanceFields, "get_k8s_pod_yaml")
+@mock.patch("airflow.providers.cncf.kubernetes.template_rendering.render_k8s_pod_yaml")
+def test_get_rendered_k8s_spec(render_k8s_pod_yaml, rtif_get_k8s_pod_yaml, create_task_instance):
+    # Create new TI for the same Task
+    ti = create_task_instance()
+
+    mock.patch.object(ti, "render_k8s_pod_yaml", autospec=True)
+
+    fake_spec = {"ermagawds": "pods"}
+
+    session = mock.Mock()
+
+    rtif_get_k8s_pod_yaml.return_value = fake_spec
+    assert ti.get_rendered_k8s_spec(session) == fake_spec
+
+    rtif_get_k8s_pod_yaml.assert_called_once_with(ti, session=session)
+    render_k8s_pod_yaml.assert_not_called()
+
+    # Now test that when we _don't_ find it in the DB, it calls render_k8s_pod_yaml
+    rtif_get_k8s_pod_yaml.return_value = None
+    render_k8s_pod_yaml.return_value = fake_spec
+
+    assert ti.get_rendered_k8s_spec(session) == fake_spec
+
+    render_k8s_pod_yaml.assert_called_once()
+
+
+@mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
+@mock.patch("airflow.utils.log.secrets_masker.redact", autospec=True, side_effect=lambda d, _=None: d)
+@mock.patch("airflow.providers.cncf.kubernetes.template_rendering.render_k8s_pod_yaml")
+def test_get_k8s_pod_yaml(render_k8s_pod_yaml, redact, dag_maker):
+    """
+    Test that k8s_pod_yaml is rendered correctly, stored in the database,
+    and is correctly fetched using RTIF.get_k8s_pod_yaml.
+    """
+    with dag_maker("test_get_k8s_pod_yaml") as dag:
+        task = BashOperator(task_id="test", bash_command="echo hi")
+    dr = dag_maker.create_dagrun()
+    dag.fileloc = TEST_DAGS_FOLDER + "/test_get_k8s_pod_yaml.py"
+
+    ti = dr.task_instances[0]
+    ti.task = task
+
+    render_k8s_pod_yaml.return_value = {"I'm a": "pod"}
+
+    rtif = RTIF(ti=ti)
+
+    assert ti.dag_id == rtif.dag_id
+    assert ti.task_id == rtif.task_id
+    assert ti.run_id == rtif.run_id
+
+    expected_pod_yaml = {"I'm a": "pod"}
+
+    assert rtif.k8s_pod_yaml == render_k8s_pod_yaml.return_value
+    # K8s pod spec dict was passed to redact
+    redact.assert_any_call(rtif.k8s_pod_yaml)
+
+    with create_session() as session:
+        session.add(rtif)
+        session.flush()
+
+        assert expected_pod_yaml == RTIF.get_k8s_pod_yaml(ti=ti, session=session)
+        make_transient(ti)
+        # "Delete" it from the DB
+        session.rollback()
+
+        # Test the else part of get_k8s_pod_yaml
+        # i.e. for the TIs that are not stored in RTIF table
+        # Fetching them will return None
+        assert RTIF.get_k8s_pod_yaml(ti=ti, session=session) is None
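
Reading the assertions in test_get_rendered_k8s_spec above, the lookup order being exercised is roughly the following. This is an illustrative sketch only; the helper name and signatures are assumed from the patch targets used in the test, not taken from the provider's actual implementation:

    from airflow.models.renderedtifields import RenderedTaskInstanceFields
    from airflow.providers.cncf.kubernetes.template_rendering import render_k8s_pod_yaml


    def rendered_k8s_spec_sketch(ti, session):
        # Prefer the pod spec already persisted for this task instance ...
        rendered = RenderedTaskInstanceFields.get_k8s_pod_yaml(ti=ti, session=session)
        if rendered is not None:
            return rendered
        # ... and only render it on the fly when nothing is stored.
        return render_k8s_pod_yaml(ti)
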
diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py
index b4042945d3..e4e6ac5ad5 100644
--- a/tests/sensors/test_base.py
+++ b/tests/sensors/test_base.py
@@ -35,15 +35,17 @@ from airflow.executors.executor_constants import (
     LOCAL_KUBERNETES_EXECUTOR,
     SEQUENTIAL_EXECUTOR,
 )
-from airflow.executors.kubernetes_executor import KubernetesExecutor
 from airflow.executors.local_executor import LocalExecutor
-from airflow.executors.local_kubernetes_executor import LocalKubernetesExecutor
 from airflow.executors.sequential_executor import SequentialExecutor
 from airflow.models import TaskReschedule
 from airflow.models.xcom import XCom
 from airflow.operators.empty import EmptyOperator
 from airflow.providers.celery.executors.celery_executor import CeleryExecutor
 from airflow.providers.celery.executors.celery_kubernetes_executor import CeleryKubernetesExecutor
+from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor
+from airflow.providers.cncf.kubernetes.executors.local_kubernetes_executor import (
+    LocalKubernetesExecutor,
+)
 from airflow.sensors.base import BaseSensorOperator, PokeReturnValue, poke_mode_only
 from airflow.ti_deps.deps.ready_to_reschedule import ReadyToRescheduleDep
 from airflow.utils import timezone
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index c449a6f998..c89122879e 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -42,7 +42,6 @@ from airflow.decorators import teardown
 from airflow.decorators.base import DecoratedOperator
 from airflow.exceptions import AirflowException, SerializationError
 from airflow.hooks.base import BaseHook
-from airflow.kubernetes.pod_generator import PodGenerator
 from airflow.models import DAG, Connection, DagBag, Operator
 from airflow.models.baseoperator import BaseOperator, BaseOperatorLink
 from airflow.models.expandinput import EXPAND_INPUT_EMPTY
@@ -51,6 +50,7 @@ from airflow.models.param import Param, ParamsDict
 from airflow.models.xcom import XCom
 from airflow.operators.bash import BashOperator
 from airflow.operators.empty import EmptyOperator
+from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 from airflow.security import permissions
 from airflow.sensors.bash import BashSensor
 from airflow.serialization.json_schema import load_dag_schema_dict
@@ -1998,7 +1998,7 @@ def test_kubernetes_optional():
         spec.loader.exec_module(module)
 
         # if we got this far, the module did not try to load kubernetes, but
-        # did it try to access airflow.kubernetes.*?
+        # did it try to access airflow.providers.cncf.kubernetes.*?
         imported_airflow = {
             c.args[0].split(".", 2)[1] for c in import_mock.call_args_list if c.args[0].startswith("airflow.")
         }
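
Purely illustrative: the set comprehension above keeps only the first package component after "airflow.", which is why the provider import no longer counts as touching "kubernetes":

    "airflow.providers.cncf.kubernetes.pod_generator".split(".", 2)[1]  # -> "providers"
    "airflow.kubernetes.pod_generator".split(".", 2)[1]                 # -> "kubernetes"
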
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes.py b/tests/system/providers/cncf/kubernetes/example_kubernetes.py
index 74078541bd..ef7d6db19a 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes.py
+++ b/tests/system/providers/cncf/kubernetes/example_kubernetes.py
@@ -26,9 +26,9 @@ from datetime import datetime
 from kubernetes.client import models as k8s
 
 from airflow import DAG
-from airflow.kubernetes.secret import Secret
 from airflow.operators.bash import BashOperator
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
+from airflow.providers.cncf.kubernetes.secret import Secret
 
 # [START howto_operator_k8s_cluster_resources]
 secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn")
diff --git a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py b/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
index 36e143c096..0b0220f822 100644
--- a/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
+++ b/tests/system/providers/cncf/kubernetes/example_kubernetes_async.py
@@ -27,9 +27,9 @@ from datetime import datetime
 from kubernetes.client import models as k8s
 
 from airflow import DAG
-from airflow.kubernetes.secret import Secret
 from airflow.operators.bash import BashOperator
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
+from airflow.providers.cncf.kubernetes.secret import Secret
 
 # [START howto_operator_k8s_cluster_resources]
 secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn")
diff --git a/tests/utils/test_log_handlers.py b/tests/utils/test_log_handlers.py
index aa40a6f45d..8c772c5799 100644
--- a/tests/utils/test_log_handlers.py
+++ b/tests/utils/test_log_handlers.py
@@ -272,7 +272,9 @@ class TestFileTaskLogHandler:
                 ["file1 content", "file2 content"],
             )
 
-    @mock.patch("airflow.executors.kubernetes_executor.KubernetesExecutor.get_task_log")
+    @mock.patch(
+        "airflow.providers.cncf.kubernetes.executors.kubernetes_executor.KubernetesExecutor.get_task_log"
+    )
     @pytest.mark.parametrize("state", [TaskInstanceState.RUNNING, TaskInstanceState.SUCCESS])
     def test__read_for_k8s_executor(self, mock_k8s_get_task_log, create_task_instance, state):
         """Test for k8s executor, the log is read from get_task_log method"""
@@ -372,7 +374,7 @@ class TestFileTaskLogHandler:
         ],
     )
     @patch.dict("os.environ", AIRFLOW__CORE__EXECUTOR="KubernetesExecutor")
-    @patch("airflow.kubernetes.kube_client.get_kube_client")
+    @patch("airflow.providers.cncf.kubernetes.kube_client.get_kube_client")
     def test_read_from_k8s_under_multi_namespace_mode(
         self, mock_kube_client, pod_override, namespace_to_call
     ):
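
As a side note on the hunk above (illustrative only, not part of this commit), the long dotted patch target can also be expressed by patching the attribute on the imported class, which avoids the wrapped string and keeps working if the module moves again:

    from unittest import mock

    from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubernetesExecutor

    with mock.patch.object(KubernetesExecutor, "get_task_log") as mock_get_task_log:
        # Equivalent to patching the dotted path: the attribute is replaced on
        # the class object itself for the duration of the context manager.
        ...
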