Posted to commits@airflow.apache.org by po...@apache.org on 2022/09/13 17:20:54 UTC

[airflow] branch main updated: Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 06acf40a43 Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)
06acf40a43 is described below

commit 06acf40a4337759797f666d5bb27a5a393b74fed
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Tue Sep 13 19:20:45 2022 +0200

    Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)
    
    This PR applies PEP-563 to all non-core Airflow Python files, i.e.
    those that we usually do not cherry-pick to v2-* branches. A
    follow-up PR will apply the same change to the "core" files right
    after, with the intention of cherry-picking it to the v2-* branch
    before the 2.4.0 release candidate is ready, in order to make
    cherry-picking easier.
    
    This PR is the result of combining some of the 47 PRs that were
    reviewed and approved separately (otherwise it would have been
    unreviewable).
    
    The history of those PRs can be found at:
    https://github.com/apache/airflow/pulls?q=is%3Apr+label%3Afuture-annotations+is%3Aopen
    
    Relevant discussion: https://lists.apache.org/thread/81fr042s5d3v17v83bpo24tnrr2pp0fp
    Lazy consensus call: https://lists.apache.org/thread/l74nvjh8tgbtojllhwkcn7f8mfnlz4jq
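
For context, applying PEP-563 in this way generally means adding
"from __future__ import annotations" at the top of each module, after which
annotations are stored as strings and never evaluated at import time, so
modern hint syntax can replace typing.Optional/typing.List even on older
Python versions. The sketch below illustrates the shape of such a change;
ExampleHook, Record and get_records are hypothetical names for illustration
only and are not taken from the actual diff.

    # Before (annotations evaluated at runtime, typing helpers required):
    #
    #     from typing import List, Optional
    #
    #     class ExampleHook:
    #         def get_records(self, sql: str, parameters: Optional[dict] = None) -> List["Record"]:
    #             ...

    # After (PEP-563: annotations are lazy strings, so PEP 604/585 syntax works):
    from __future__ import annotations


    class Record:
        """Placeholder result type, used only for this illustration."""


    class ExampleHook:
        def get_records(self, sql: str, parameters: dict | None = None) -> list[Record]:
            # The "dict | None" and "list[Record]" annotations are never
            # evaluated at import time, so this runs on Python 3.7+.
            return []
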
---
 .pre-commit-config.yaml                            |   4 +-
 airflow/providers/airbyte/hooks/airbyte.py         |   8 +-
 airflow/providers/airbyte/operators/airbyte.py     |  10 +-
 airflow/providers/airbyte/sensors/airbyte.py       |   4 +-
 airflow/providers/alibaba/cloud/hooks/oss.py       |  38 +-
 .../alibaba/cloud/log/oss_task_handler.py          |   2 +
 airflow/providers/alibaba/cloud/operators/oss.py   |  29 +-
 airflow/providers/alibaba/cloud/sensors/oss_key.py |  11 +-
 .../amazon/aws/example_dags/example_appflow.py     |   1 +
 .../amazon/aws/example_dags/example_dms.py         |   1 +
 .../aws/example_dags/example_dynamodb_to_s3.py     |   2 +
 .../amazon/aws/example_dags/example_ec2.py         |   2 +
 .../amazon/aws/example_dags/example_ecs.py         |   1 +
 .../aws/example_dags/example_eks_templated.py      |  11 +-
 .../example_eks_with_fargate_in_one_step.py        |   7 +-
 .../example_eks_with_fargate_profile.py            |   7 +-
 .../example_eks_with_nodegroup_in_one_step.py      |   7 +-
 .../example_dags/example_eks_with_nodegroups.py    |   7 +-
 .../amazon/aws/example_dags/example_emr.py         |   2 +
 .../amazon/aws/example_dags/example_emr_eks.py     |   1 +
 .../amazon/aws/example_dags/example_ftp_to_s3.py   |   2 +-
 .../amazon/aws/example_dags/example_gcs_to_s3.py   |   1 +
 .../aws/example_dags/example_glacier_to_gcs.py     |   2 +
 .../example_google_api_sheets_to_s3.py             |   1 +
 .../example_google_api_youtube_to_s3.py            |   1 +
 .../aws/example_dags/example_hive_to_dynamodb.py   |   2 +-
 .../example_dags/example_imap_attachment_to_s3.py  |   2 +-
 .../amazon/aws/example_dags/example_mongo_to_s3.py |   2 +-
 .../amazon/aws/example_dags/example_s3.py          |   4 +-
 .../amazon/aws/example_dags/example_s3_to_ftp.py   |   2 +-
 .../aws/example_dags/example_s3_to_redshift.py     |   1 +
 .../amazon/aws/example_dags/example_s3_to_sftp.py  |   1 +
 .../aws/example_dags/example_salesforce_to_s3.py   |   2 +-
 .../amazon/aws/example_dags/example_sftp_to_s3.py  |   2 +-
 .../amazon/aws/example_dags/example_sql_to_s3.py   |   2 +-
 airflow/providers/amazon/aws/exceptions.py         |   3 +-
 airflow/providers/amazon/aws/hooks/appflow.py      |   9 +-
 airflow/providers/amazon/aws/hooks/athena.py       |  35 +-
 airflow/providers/amazon/aws/hooks/base_aws.py     |  80 ++-
 airflow/providers/amazon/aws/hooks/batch_client.py |  43 +-
 .../providers/amazon/aws/hooks/batch_waiters.py    |  15 +-
 .../providers/amazon/aws/hooks/cloud_formation.py  |   7 +-
 airflow/providers/amazon/aws/hooks/datasync.py     |   7 +-
 airflow/providers/amazon/aws/hooks/dms.py          |   9 +-
 airflow/providers/amazon/aws/hooks/dynamodb.py     |   8 +-
 airflow/providers/amazon/aws/hooks/ec2.py          |  11 +-
 airflow/providers/amazon/aws/hooks/ecs.py          |  20 +-
 airflow/providers/amazon/aws/hooks/eks.py          |  49 +-
 .../aws/hooks/elasticache_replication_group.py     |  21 +-
 airflow/providers/amazon/aws/hooks/emr.py          |  38 +-
 airflow/providers/amazon/aws/hooks/glacier.py      |  10 +-
 airflow/providers/amazon/aws/hooks/glue.py         |  32 +-
 airflow/providers/amazon/aws/hooks/glue_catalog.py |  13 +-
 airflow/providers/amazon/aws/hooks/glue_crawler.py |   2 +
 airflow/providers/amazon/aws/hooks/kinesis.py      |   3 +-
 .../providers/amazon/aws/hooks/lambda_function.py  |  47 +-
 airflow/providers/amazon/aws/hooks/logs.py         |   7 +-
 airflow/providers/amazon/aws/hooks/quicksight.py   |   1 +
 airflow/providers/amazon/aws/hooks/rds.py          |   1 +
 .../providers/amazon/aws/hooks/redshift_cluster.py |  11 +-
 .../providers/amazon/aws/hooks/redshift_data.py    |   1 +
 airflow/providers/amazon/aws/hooks/redshift_sql.py |  13 +-
 airflow/providers/amazon/aws/hooks/s3.py           | 144 +++---
 airflow/providers/amazon/aws/hooks/sagemaker.py    |  48 +-
 .../providers/amazon/aws/hooks/secrets_manager.py  |   5 +-
 airflow/providers/amazon/aws/hooks/ses.py          |  18 +-
 airflow/providers/amazon/aws/hooks/sns.py          |  10 +-
 airflow/providers/amazon/aws/hooks/sqs.py          |  11 +-
 .../providers/amazon/aws/hooks/step_function.py    |   6 +-
 airflow/providers/amazon/aws/hooks/sts.py          |   1 +
 airflow/providers/amazon/aws/links/base_aws.py     |  11 +-
 airflow/providers/amazon/aws/links/batch.py        |   1 +
 airflow/providers/amazon/aws/links/emr.py          |   1 +
 airflow/providers/amazon/aws/links/logs.py         |   1 +
 .../amazon/aws/log/cloudwatch_task_handler.py      |   5 +-
 .../providers/amazon/aws/log/s3_task_handler.py    |   5 +-
 airflow/providers/amazon/aws/operators/appflow.py  |  37 +-
 airflow/providers/amazon/aws/operators/athena.py   |  17 +-
 .../providers/amazon/aws/operators/aws_lambda.py   |   1 +
 airflow/providers/amazon/aws/operators/batch.py    |  48 +-
 .../amazon/aws/operators/cloud_formation.py        |  10 +-
 airflow/providers/amazon/aws/operators/datasync.py |  42 +-
 airflow/providers/amazon/aws/operators/dms.py      |  30 +-
 airflow/providers/amazon/aws/operators/ec2.py      |  12 +-
 airflow/providers/amazon/aws/operators/ecs.py      |  52 +-
 airflow/providers/amazon/aws/operators/eks.py      |  69 +--
 airflow/providers/amazon/aws/operators/emr.py      |  56 ++-
 airflow/providers/amazon/aws/operators/glacier.py  |   4 +-
 airflow/providers/amazon/aws/operators/glue.py     |  23 +-
 .../providers/amazon/aws/operators/glue_crawler.py |   4 +-
 .../amazon/aws/operators/lambda_function.py        |  15 +-
 .../providers/amazon/aws/operators/quicksight.py   |   7 +-
 airflow/providers/amazon/aws/operators/rds.py      |  47 +-
 .../amazon/aws/operators/redshift_cluster.py       |  58 +--
 .../amazon/aws/operators/redshift_data.py          |  24 +-
 .../providers/amazon/aws/operators/redshift_sql.py |   9 +-
 airflow/providers/amazon/aws/operators/s3.py       |  86 ++--
 .../providers/amazon/aws/operators/sagemaker.py    |  69 +--
 airflow/providers/amazon/aws/operators/sns.py      |   9 +-
 airflow/providers/amazon/aws/operators/sqs.py      |  11 +-
 .../amazon/aws/operators/step_function.py          |  16 +-
 .../amazon/aws/secrets/secrets_manager.py          |  35 +-
 .../amazon/aws/secrets/systems_manager.py          |  12 +-
 airflow/providers/amazon/aws/sensors/athena.py     |   8 +-
 airflow/providers/amazon/aws/sensors/batch.py      |  18 +-
 .../amazon/aws/sensors/cloud_formation.py          |  10 +-
 airflow/providers/amazon/aws/sensors/dms.py        |  13 +-
 airflow/providers/amazon/aws/sensors/ec2.py        |   8 +-
 airflow/providers/amazon/aws/sensors/ecs.py        |  25 +-
 airflow/providers/amazon/aws/sensors/eks.py        |  17 +-
 airflow/providers/amazon/aws/sensors/emr.py        |  50 +-
 airflow/providers/amazon/aws/sensors/glacier.py    |   4 +-
 airflow/providers/amazon/aws/sensors/glue.py       |  12 +-
 .../amazon/aws/sensors/glue_catalog_partition.py   |  10 +-
 .../providers/amazon/aws/sensors/glue_crawler.py   |   8 +-
 airflow/providers/amazon/aws/sensors/quicksight.py |   9 +-
 airflow/providers/amazon/aws/sensors/rds.py        |  19 +-
 .../amazon/aws/sensors/redshift_cluster.py         |   8 +-
 airflow/providers/amazon/aws/sensors/s3.py         |  27 +-
 airflow/providers/amazon/aws/sensors/sagemaker.py  |  15 +-
 airflow/providers/amazon/aws/sensors/sqs.py        |  14 +-
 .../providers/amazon/aws/sensors/step_function.py  |   9 +-
 .../amazon/aws/transfers/dynamodb_to_s3.py         |  14 +-
 .../providers/amazon/aws/transfers/exasol_to_s3.py |  13 +-
 .../providers/amazon/aws/transfers/ftp_to_s3.py    |  16 +-
 .../providers/amazon/aws/transfers/gcs_to_s3.py    |  22 +-
 .../amazon/aws/transfers/glacier_to_gcs.py         |  10 +-
 .../amazon/aws/transfers/google_api_to_s3.py       |  17 +-
 .../amazon/aws/transfers/hive_to_dynamodb.py       |  14 +-
 .../amazon/aws/transfers/imap_attachment_to_s3.py  |   8 +-
 .../providers/amazon/aws/transfers/local_to_s3.py  |  12 +-
 .../providers/amazon/aws/transfers/mongo_to_s3.py  |  16 +-
 .../amazon/aws/transfers/redshift_to_s3.py         |  18 +-
 .../providers/amazon/aws/transfers/s3_to_ftp.py    |   3 +-
 .../amazon/aws/transfers/s3_to_redshift.py         |  15 +-
 .../providers/amazon/aws/transfers/s3_to_sftp.py   |   8 +-
 .../amazon/aws/transfers/salesforce_to_s3.py       |   9 +-
 .../providers/amazon/aws/transfers/sftp_to_s3.py   |   4 +-
 .../providers/amazon/aws/transfers/sql_to_s3.py    |  11 +-
 airflow/providers/amazon/aws/utils/__init__.py     |   4 +-
 .../amazon/aws/utils/connection_wrapper.py         |  81 +--
 .../providers/amazon/aws/utils/eks_get_token.py    |   1 +
 airflow/providers/amazon/aws/utils/emailer.py      |  16 +-
 airflow/providers/amazon/aws/utils/rds.py          |   1 +
 airflow/providers/amazon/aws/utils/redshift.py     |   1 +
 airflow/providers/apache/beam/hooks/beam.py        |  34 +-
 airflow/providers/apache/beam/operators/beam.py    |  72 +--
 .../providers/apache/cassandra/hooks/cassandra.py  |   8 +-
 .../providers/apache/cassandra/sensors/record.py   |   7 +-
 .../providers/apache/cassandra/sensors/table.py    |   4 +-
 airflow/providers/apache/drill/hooks/drill.py      |   7 +-
 airflow/providers/apache/drill/operators/drill.py  |  10 +-
 airflow/providers/apache/druid/hooks/druid.py      |  13 +-
 airflow/providers/apache/druid/operators/druid.py  |   7 +-
 .../apache/druid/operators/druid_check.py          |   2 +
 .../apache/druid/transfers/hive_to_druid.py        |  20 +-
 airflow/providers/apache/hdfs/hooks/hdfs.py        |   6 +-
 airflow/providers/apache/hdfs/hooks/webhdfs.py     |   8 +-
 airflow/providers/apache/hdfs/sensors/hdfs.py      |  22 +-
 airflow/providers/apache/hdfs/sensors/web_hdfs.py  |   4 +-
 airflow/providers/apache/hive/hooks/hive.py        |  72 +--
 airflow/providers/apache/hive/operators/hive.py    |  18 +-
 .../providers/apache/hive/operators/hive_stats.py  |  14 +-
 .../apache/hive/sensors/hive_partition.py          |   8 +-
 .../apache/hive/sensors/metastore_partition.py     |   4 +-
 .../apache/hive/sensors/named_hive_partition.py    |  10 +-
 .../apache/hive/transfers/hive_to_mysql.py         |  13 +-
 .../apache/hive/transfers/hive_to_samba.py         |   4 +-
 .../apache/hive/transfers/mssql_to_hive.py         |  10 +-
 .../apache/hive/transfers/mysql_to_hive.py         |  14 +-
 .../providers/apache/hive/transfers/s3_to_hive.py  |  18 +-
 .../apache/hive/transfers/vertica_to_hive.py       |   8 +-
 airflow/providers/apache/kylin/hooks/kylin.py      |   7 +-
 .../providers/apache/kylin/operators/kylin_cube.py |  23 +-
 airflow/providers/apache/livy/hooks/livy.py        |  73 ++-
 airflow/providers/apache/livy/operators/livy.py    |  51 +-
 airflow/providers/apache/livy/sensors/livy.py      |  15 +-
 airflow/providers/apache/pig/hooks/pig.py          |   8 +-
 airflow/providers/apache/pig/operators/pig.py      |  10 +-
 airflow/providers/apache/pinot/hooks/pinot.py      |  51 +-
 airflow/providers/apache/spark/hooks/spark_jdbc.py |  53 +-
 .../apache/spark/hooks/spark_jdbc_script.py        |   7 +-
 airflow/providers/apache/spark/hooks/spark_sql.py  |  29 +-
 .../providers/apache/spark/hooks/spark_submit.py   |  75 +--
 .../providers/apache/spark/operators/spark_jdbc.py |  53 +-
 .../providers/apache/spark/operators/spark_sql.py  |  27 +-
 .../apache/spark/operators/spark_submit.py         |  51 +-
 airflow/providers/apache/sqoop/hooks/sqoop.py      |  98 ++--
 airflow/providers/apache/sqoop/operators/sqoop.py  |  55 ++-
 .../arangodb/example_dags/example_arangodb.py      |   2 +
 airflow/providers/arangodb/hooks/arangodb.py       |   9 +-
 airflow/providers/arangodb/operators/arangodb.py   |   8 +-
 airflow/providers/arangodb/sensors/arangodb.py     |   4 +-
 airflow/providers/asana/hooks/asana.py             |  19 +-
 airflow/providers/asana/operators/asana_tasks.py   |  15 +-
 airflow/providers/atlassian/jira/hooks/jira.py     |   8 +-
 airflow/providers/atlassian/jira/operators/jira.py |  11 +-
 airflow/providers/atlassian/jira/sensors/jira.py   |  22 +-
 airflow/providers/celery/sensors/celery_queue.py   |   9 +-
 airflow/providers/cloudant/hooks/cloudant.py       |   6 +-
 airflow/providers/cncf/kubernetes/__init__.py      |   2 +
 .../backcompat/backwards_compat_converters.py      |   7 +-
 .../providers/cncf/kubernetes/backcompat/pod.py    |   1 +
 .../kubernetes/backcompat/pod_runtime_info_env.py  |   1 +
 .../providers/cncf/kubernetes/backcompat/volume.py |   1 +
 .../cncf/kubernetes/backcompat/volume_mount.py     |   1 +
 .../cncf/kubernetes/decorators/kubernetes.py       |   9 +-
 .../providers/cncf/kubernetes/hooks/kubernetes.py  |  56 ++-
 .../cncf/kubernetes/operators/kubernetes_pod.py    | 108 ++--
 .../cncf/kubernetes/operators/spark_kubernetes.py  |   8 +-
 .../cncf/kubernetes/python_kubernetes_script.py    |   3 +-
 .../cncf/kubernetes/sensors/spark_kubernetes.py    |   8 +-
 .../providers/cncf/kubernetes/utils/pod_manager.py |  22 +-
 .../cncf/kubernetes/utils/xcom_sidecar.py          |   2 +
 airflow/providers/common/sql/hooks/sql.py          |  30 +-
 airflow/providers/common/sql/operators/sql.py      |  72 +--
 airflow/providers/common/sql/sensors/sql.py        |   1 +
 airflow/providers/databricks/hooks/databricks.py   |  22 +-
 .../providers/databricks/hooks/databricks_base.py  |  22 +-
 .../providers/databricks/hooks/databricks_sql.py   |  29 +-
 .../providers/databricks/operators/databricks.py   |  72 +--
 .../databricks/operators/databricks_repos.py       |  31 +-
 .../databricks/operators/databricks_sql.py         |  62 +--
 .../providers/databricks/triggers/databricks.py    |   6 +-
 airflow/providers/databricks/utils/databricks.py   |   6 +-
 airflow/providers/datadog/hooks/datadog.py         |  35 +-
 airflow/providers/datadog/sensors/datadog.py       |  14 +-
 airflow/providers/dbt/cloud/hooks/dbt.py           |  71 +--
 airflow/providers/dbt/cloud/operators/dbt.py       |  23 +-
 airflow/providers/dbt/cloud/sensors/dbt.py         |   7 +-
 airflow/providers/dingding/hooks/dingding.py       |   8 +-
 airflow/providers/dingding/operators/dingding.py   |  10 +-
 airflow/providers/discord/hooks/discord_webhook.py |  19 +-
 .../providers/discord/operators/discord_webhook.py |  19 +-
 airflow/providers/docker/decorators/docker.py      |  11 +-
 airflow/providers/docker/hooks/docker.py           |  14 +-
 airflow/providers/docker/operators/docker.py       |  68 +--
 airflow/providers/docker/operators/docker_swarm.py |  18 +-
 .../providers/elasticsearch/hooks/elasticsearch.py |  10 +-
 .../elasticsearch/log/es_json_formatter.py         |   1 +
 .../providers/elasticsearch/log/es_task_handler.py |  17 +-
 airflow/providers/exasol/hooks/exasol.py           |  25 +-
 airflow/providers/exasol/operators/exasol.py       |  12 +-
 airflow/providers/facebook/ads/hooks/ads.py        |  20 +-
 airflow/providers/ftp/hooks/ftp.py                 |  14 +-
 airflow/providers/ftp/sensors/ftp.py               |   4 +-
 airflow/providers/github/hooks/github.py           |  11 +-
 airflow/providers/github/operators/github.py       |   9 +-
 airflow/providers/github/sensors/github.py         |  23 +-
 airflow/providers/google/__init__.py               |   2 +
 airflow/providers/google/ads/hooks/ads.py          |  24 +-
 airflow/providers/google/ads/operators/ads.py      |  10 +-
 .../providers/google/ads/transfers/ads_to_gcs.py   |  13 +-
 .../_internal_client/secret_manager_client.py      |   4 +-
 .../example_automl_nl_text_classification.py       |   3 +-
 .../example_automl_nl_text_sentiment.py            |   3 +-
 .../cloud/example_dags/example_automl_tables.py    |   7 +-
 .../example_dags/example_automl_translation.py     |   3 +-
 ...ple_automl_video_intelligence_classification.py |   3 +-
 .../example_automl_video_intelligence_tracking.py  |   3 +-
 .../example_automl_vision_object_detection.py      |   3 +-
 .../cloud/example_dags/example_bigquery_dts.py     |   3 +-
 .../google/cloud/example_dags/example_bigtable.py  |   3 +-
 .../cloud/example_dags/example_cloud_composer.py   |   1 +
 .../cloud/example_dags/example_cloud_sql_query.py  |   3 +-
 .../example_cloud_storage_transfer_service_aws.py  |   2 +-
 .../example_cloud_storage_transfer_service_gcp.py  |   2 +-
 .../cloud/example_dags/example_cloud_task.py       |   2 +-
 .../google/cloud/example_dags/example_compute.py   |   2 +-
 .../cloud/example_dags/example_compute_ssh.py      |   1 +
 .../google/cloud/example_dags/example_dataflow.py  |  11 +-
 .../example_dags/example_dataflow_flex_template.py |   3 +-
 .../cloud/example_dags/example_dataflow_sql.py     |   3 +-
 .../cloud/example_dags/example_datafusion.py       |   3 +-
 .../google/cloud/example_dags/example_dataplex.py  |   2 +-
 .../google/cloud/example_dags/example_dataprep.py  |   2 +
 .../example_dags/example_dataproc_metastore.py     |   1 +
 .../google/cloud/example_dags/example_dlp.py       |   2 +-
 .../example_dags/example_facebook_ads_to_gcs.py    |   2 +
 .../google/cloud/example_dags/example_functions.py |   6 +-
 .../cloud/example_dags/example_gdrive_to_gcs.py    |   1 +
 .../cloud/example_dags/example_gdrive_to_local.py  |   1 +
 .../google/cloud/example_dags/example_looker.py    |   1 +
 .../google/cloud/example_dags/example_mlengine.py  |  11 +-
 .../cloud/example_dags/example_postgres_to_gcs.py  |   2 +
 .../cloud/example_dags/example_presto_to_gcs.py    |   2 +
 .../example_dags/example_salesforce_to_gcs.py      |   3 +-
 .../cloud/example_dags/example_sftp_to_gcs.py      |   1 +
 .../google/cloud/example_dags/example_vertex_ai.py |  12 +-
 .../google/cloud/example_dags/example_vision.py    |   2 +-
 airflow/providers/google/cloud/hooks/automl.py     | 123 ++---
 airflow/providers/google/cloud/hooks/bigquery.py   | 401 ++++++++-------
 .../providers/google/cloud/hooks/bigquery_dts.py   |  45 +-
 airflow/providers/google/cloud/hooks/bigtable.py   |  34 +-
 .../providers/google/cloud/hooks/cloud_build.py    |  98 ++--
 .../providers/google/cloud/hooks/cloud_composer.py |  79 +--
 .../google/cloud/hooks/cloud_memorystore.py        | 134 ++---
 airflow/providers/google/cloud/hooks/cloud_sql.py  |  48 +-
 .../cloud/hooks/cloud_storage_transfer_service.py  |  21 +-
 airflow/providers/google/cloud/hooks/compute.py    |  19 +-
 .../providers/google/cloud/hooks/compute_ssh.py    |  20 +-
 .../providers/google/cloud/hooks/datacatalog.py    | 205 ++++----
 airflow/providers/google/cloud/hooks/dataflow.py   |  86 ++--
 airflow/providers/google/cloud/hooks/dataform.py   |  39 +-
 airflow/providers/google/cloud/hooks/datafusion.py |  37 +-
 airflow/providers/google/cloud/hooks/dataplex.py   |  47 +-
 airflow/providers/google/cloud/hooks/dataprep.py   |  14 +-
 airflow/providers/google/cloud/hooks/dataproc.py   | 344 ++++++-------
 .../google/cloud/hooks/dataproc_metastore.py       | 106 ++--
 airflow/providers/google/cloud/hooks/datastore.py  |  31 +-
 airflow/providers/google/cloud/hooks/dlp.py        | 352 +++++++------
 airflow/providers/google/cloud/hooks/functions.py  |  12 +-
 airflow/providers/google/cloud/hooks/gcs.py        | 116 ++---
 airflow/providers/google/cloud/hooks/gdm.py        |  18 +-
 airflow/providers/google/cloud/hooks/kms.py        |  25 +-
 .../google/cloud/hooks/kubernetes_engine.py        |  30 +-
 .../providers/google/cloud/hooks/life_sciences.py  |   7 +-
 airflow/providers/google/cloud/hooks/looker.py     |  10 +-
 airflow/providers/google/cloud/hooks/mlengine.py   |  12 +-
 .../google/cloud/hooks/natural_language.py         |  66 +--
 airflow/providers/google/cloud/hooks/os_login.py   |  16 +-
 airflow/providers/google/cloud/hooks/pubsub.py     |  78 +--
 .../providers/google/cloud/hooks/secret_manager.py |  12 +-
 airflow/providers/google/cloud/hooks/spanner.py    |  20 +-
 .../providers/google/cloud/hooks/speech_to_text.py |  16 +-
 .../providers/google/cloud/hooks/stackdriver.py    | 108 ++--
 airflow/providers/google/cloud/hooks/tasks.py      | 120 ++---
 .../providers/google/cloud/hooks/text_to_speech.py |  18 +-
 airflow/providers/google/cloud/hooks/translate.py  |  16 +-
 .../google/cloud/hooks/vertex_ai/auto_ml.py        | 266 +++++-----
 .../cloud/hooks/vertex_ai/batch_prediction_job.py  |  74 +--
 .../google/cloud/hooks/vertex_ai/custom_job.py     | 542 ++++++++++-----------
 .../google/cloud/hooks/vertex_ai/dataset.py        | 112 ++---
 .../cloud/hooks/vertex_ai/endpoint_service.py      |  78 +--
 .../hooks/vertex_ai/hyperparameter_tuning_job.py   | 102 ++--
 .../google/cloud/hooks/vertex_ai/model_service.py  |  48 +-
 .../google/cloud/hooks/video_intelligence.py       |  26 +-
 airflow/providers/google/cloud/hooks/vision.py     | 170 +++----
 airflow/providers/google/cloud/hooks/workflows.py  |  69 +--
 airflow/providers/google/cloud/links/base.py       |   8 +-
 airflow/providers/google/cloud/links/bigquery.py   |  10 +-
 .../providers/google/cloud/links/bigquery_dts.py   |   4 +-
 airflow/providers/google/cloud/links/bigtable.py   |   7 +-
 .../providers/google/cloud/links/cloud_build.py    |   9 +-
 .../google/cloud/links/cloud_memorystore.py        |  20 +-
 airflow/providers/google/cloud/links/cloud_sql.py  |  12 +-
 .../providers/google/cloud/links/cloud_tasks.py    |  14 +-
 .../providers/google/cloud/links/datacatalog.py    |  16 +-
 airflow/providers/google/cloud/links/dataflow.py   |  12 +-
 airflow/providers/google/cloud/links/dataform.py   |   4 +-
 airflow/providers/google/cloud/links/dataplex.py   |   5 +-
 airflow/providers/google/cloud/links/dataproc.py   |  15 +-
 airflow/providers/google/cloud/links/datastore.py  |   5 +-
 .../google/cloud/links/kubernetes_engine.py        |   7 +-
 .../providers/google/cloud/links/life_sciences.py  |   3 +-
 airflow/providers/google/cloud/links/pubsub.py     |  14 +-
 airflow/providers/google/cloud/links/spanner.py    |  12 +-
 .../providers/google/cloud/links/stackdriver.py    |  12 +-
 airflow/providers/google/cloud/links/vertex_ai.py  |  25 +-
 airflow/providers/google/cloud/links/workflows.py  |  16 +-
 .../providers/google/cloud/log/gcs_task_handler.py |  14 +-
 .../google/cloud/log/stackdriver_task_handler.py   |  42 +-
 airflow/providers/google/cloud/operators/automl.py | 158 +++---
 .../providers/google/cloud/operators/bigquery.py   | 290 +++++------
 .../google/cloud/operators/bigquery_dts.py         |  52 +-
 .../providers/google/cloud/operators/bigtable.py   |  68 +--
 .../google/cloud/operators/cloud_build.py          | 160 +++---
 .../google/cloud/operators/cloud_composer.py       | 110 +++--
 .../google/cloud/operators/cloud_memorystore.py    | 250 +++++-----
 .../providers/google/cloud/operators/cloud_sql.py  |  70 +--
 .../operators/cloud_storage_transfer_service.py    |  85 ++--
 .../providers/google/cloud/operators/compute.py    |  37 +-
 .../google/cloud/operators/datacatalog.py          | 315 ++++++------
 .../providers/google/cloud/operators/dataflow.py   | 112 ++---
 .../providers/google/cloud/operators/dataform.py   |  67 +--
 .../providers/google/cloud/operators/datafusion.py | 131 ++---
 .../providers/google/cloud/operators/dataplex.py   |  65 +--
 .../providers/google/cloud/operators/dataprep.py   |   8 +-
 .../providers/google/cloud/operators/dataproc.py   | 324 ++++++------
 .../google/cloud/operators/dataproc_metastore.py   | 171 ++++---
 .../providers/google/cloud/operators/datastore.py  |  93 ++--
 airflow/providers/google/cloud/operators/dlp.py    | 498 ++++++++++---------
 .../providers/google/cloud/operators/functions.py  |  27 +-
 airflow/providers/google/cloud/operators/gcs.py    |  92 ++--
 .../google/cloud/operators/kubernetes_engine.py    |  30 +-
 .../google/cloud/operators/life_sciences.py        |   9 +-
 airflow/providers/google/cloud/operators/looker.py |  14 +-
 .../providers/google/cloud/operators/mlengine.py   | 152 +++---
 .../google/cloud/operators/natural_language.py     |  50 +-
 airflow/providers/google/cloud/operators/pubsub.py | 108 ++--
 .../providers/google/cloud/operators/spanner.py    |  50 +-
 .../google/cloud/operators/speech_to_text.py       |  14 +-
 .../google/cloud/operators/stackdriver.py          | 187 +++----
 airflow/providers/google/cloud/operators/tasks.py  | 161 +++---
 .../google/cloud/operators/text_to_speech.py       |  19 +-
 .../providers/google/cloud/operators/translate.py  |  12 +-
 .../google/cloud/operators/translate_speech.py     |  12 +-
 .../google/cloud/operators/vertex_ai/auto_ml.py    | 134 ++---
 .../operators/vertex_ai/batch_prediction_job.py    |  86 ++--
 .../google/cloud/operators/vertex_ai/custom_job.py | 116 ++---
 .../google/cloud/operators/vertex_ai/dataset.py    | 110 ++---
 .../cloud/operators/vertex_ai/endpoint_service.py  | 112 ++---
 .../vertex_ai/hyperparameter_tuning_job.py         |  84 ++--
 .../cloud/operators/vertex_ai/model_service.py     |  66 +--
 .../google/cloud/operators/video_intelligence.py   |  52 +-
 airflow/providers/google/cloud/operators/vision.py | 227 ++++-----
 .../providers/google/cloud/operators/workflows.py  | 124 ++---
 .../google/cloud/secrets/secret_manager.py         |  22 +-
 airflow/providers/google/cloud/sensors/bigquery.py |  20 +-
 .../providers/google/cloud/sensors/bigquery_dts.py |  26 +-
 airflow/providers/google/cloud/sensors/bigtable.py |  10 +-
 .../sensors/cloud_storage_transfer_service.py      |  12 +-
 airflow/providers/google/cloud/sensors/dataflow.py |  46 +-
 airflow/providers/google/cloud/sensors/dataform.py |  16 +-
 .../providers/google/cloud/sensors/datafusion.py   |  14 +-
 airflow/providers/google/cloud/sensors/dataplex.py |  15 +-
 airflow/providers/google/cloud/sensors/dataproc.py |  14 +-
 airflow/providers/google/cloud/sensors/gcs.py      |  39 +-
 airflow/providers/google/cloud/sensors/looker.py   |   8 +-
 airflow/providers/google/cloud/sensors/pubsub.py   |  18 +-
 airflow/providers/google/cloud/sensors/tasks.py    |  12 +-
 .../providers/google/cloud/sensors/workflows.py    |  19 +-
 .../google/cloud/transfers/adls_to_gcs.py          |  10 +-
 .../cloud/transfers/azure_fileshare_to_gcs.py      |  11 +-
 .../google/cloud/transfers/bigquery_to_bigquery.py |  18 +-
 .../google/cloud/transfers/bigquery_to_gcs.py      |  30 +-
 .../google/cloud/transfers/bigquery_to_mssql.py    |  16 +-
 .../google/cloud/transfers/bigquery_to_mysql.py    |  16 +-
 .../google/cloud/transfers/calendar_to_gcs.py      |  41 +-
 .../google/cloud/transfers/cassandra_to_gcs.py     |  35 +-
 .../google/cloud/transfers/facebook_ads_to_gcs.py  |  24 +-
 .../google/cloud/transfers/gcs_to_bigquery.py      |   7 +-
 .../providers/google/cloud/transfers/gcs_to_gcs.py |   8 +-
 .../google/cloud/transfers/gcs_to_local.py         |  15 +-
 .../google/cloud/transfers/gcs_to_sftp.py          |  12 +-
 .../google/cloud/transfers/gdrive_to_gcs.py        |  13 +-
 .../google/cloud/transfers/gdrive_to_local.py      |  11 +-
 .../google/cloud/transfers/local_to_gcs.py         |   8 +-
 .../google/cloud/transfers/mssql_to_gcs.py         |   5 +-
 .../google/cloud/transfers/mysql_to_gcs.py         |   4 +-
 .../google/cloud/transfers/oracle_to_gcs.py        |   4 +-
 .../google/cloud/transfers/postgres_to_gcs.py      |   4 +-
 .../google/cloud/transfers/presto_to_gcs.py        |  12 +-
 .../providers/google/cloud/transfers/s3_to_gcs.py  |   8 +-
 .../google/cloud/transfers/salesforce_to_gcs.py    |   7 +-
 .../google/cloud/transfers/sftp_to_gcs.py          |  14 +-
 .../google/cloud/transfers/sheets_to_gcs.py        |  17 +-
 .../providers/google/cloud/transfers/sql_to_gcs.py |  20 +-
 .../google/cloud/transfers/trino_to_gcs.py         |  12 +-
 .../providers/google/cloud/triggers/bigquery.py    |  49 +-
 .../google/cloud/triggers/cloud_composer.py        |   9 +-
 .../providers/google/cloud/triggers/dataproc.py    |  10 +-
 .../google/cloud/utils/bigquery_get_data.py        |   6 +-
 .../google/cloud/utils/credentials_provider.py     |  52 +-
 .../google/cloud/utils/field_sanitizer.py          |   5 +-
 .../google/cloud/utils/field_validator.py          |  11 +-
 airflow/providers/google/cloud/utils/helpers.py    |   5 +-
 .../google/cloud/utils/mlengine_operator_utils.py  |  25 +-
 .../cloud/utils/mlengine_prediction_summary.py     |   1 +
 .../google/common/auth_backend/google_openid.py    |   8 +-
 airflow/providers/google/common/consts.py          |   2 +
 .../providers/google/common/hooks/base_google.py   |  37 +-
 .../providers/google/common/hooks/discovery_api.py |   9 +-
 airflow/providers/google/common/links/storage.py   |   8 +-
 .../google/common/utils/id_token_credentials.py    |  20 +-
 .../providers/google/firebase/hooks/firestore.py   |   9 +-
 .../google/firebase/operators/firestore.py         |  11 +-
 airflow/providers/google/go_module_utils.py        |   3 +-
 airflow/providers/google/leveldb/hooks/leveldb.py  |  14 +-
 .../providers/google/leveldb/operators/leveldb.py  |  14 +-
 .../example_dags/example_display_video.py          |   8 +-
 .../google/marketing_platform/hooks/analytics.py   |  18 +-
 .../marketing_platform/hooks/campaign_manager.py   |  32 +-
 .../marketing_platform/hooks/display_video.py      |  23 +-
 .../google/marketing_platform/hooks/search_ads.py  |  10 +-
 .../marketing_platform/operators/analytics.py      |  38 +-
 .../operators/campaign_manager.py                  |  52 +-
 .../marketing_platform/operators/display_video.py  |  68 +--
 .../marketing_platform/operators/search_ads.py     |  20 +-
 .../marketing_platform/sensors/campaign_manager.py |  10 +-
 .../marketing_platform/sensors/display_video.py    |  17 +-
 .../marketing_platform/sensors/search_ads.py       |  10 +-
 .../suite/example_dags/example_local_to_drive.py   |   1 +
 airflow/providers/google/suite/hooks/calendar.py   |  50 +-
 airflow/providers/google/suite/hooks/drive.py      |  12 +-
 airflow/providers/google/suite/hooks/sheets.py     |  22 +-
 airflow/providers/google/suite/operators/sheets.py |  11 +-
 airflow/providers/google/suite/sensors/drive.py    |  11 +-
 .../google/suite/transfers/gcs_to_gdrive.py        |  12 +-
 .../google/suite/transfers/gcs_to_sheets.py        |   7 +-
 .../google/suite/transfers/local_to_drive.py       |  13 +-
 .../google/suite/transfers/sql_to_sheets.py        |  12 +-
 airflow/providers/grpc/hooks/grpc.py               |  13 +-
 airflow/providers/grpc/operators/grpc.py           |  15 +-
 .../hashicorp/_internal_client/vault_client.py     |  56 +--
 airflow/providers/hashicorp/hooks/vault.py         |  58 +--
 airflow/providers/hashicorp/secrets/vault.py       |  50 +-
 airflow/providers/http/hooks/http.py               |  22 +-
 airflow/providers/http/operators/http.py           |  18 +-
 airflow/providers/http/sensors/http.py             |  14 +-
 airflow/providers/imap/hooks/imap.py               |  30 +-
 airflow/providers/imap/sensors/imap_attachment.py  |   4 +-
 airflow/providers/influxdb/hooks/influxdb.py       |   8 +-
 airflow/providers/influxdb/operators/influxdb.py   |   4 +-
 airflow/providers/jdbc/hooks/jdbc.py               |  11 +-
 airflow/providers/jdbc/operators/jdbc.py           |   9 +-
 airflow/providers/jenkins/hooks/jenkins.py         |   6 +-
 .../jenkins/operators/jenkins_job_trigger.py       |   9 +-
 airflow/providers/jenkins/sensors/jenkins.py       |   7 +-
 airflow/providers/jira/hooks/jira.py               |   1 +
 airflow/providers/jira/operators/jira.py           |   1 +
 airflow/providers/jira/sensors/jira.py             |   1 +
 airflow/providers/microsoft/azure/hooks/adx.py     |  12 +-
 airflow/providers/microsoft/azure/hooks/asb.py     |  20 +-
 .../providers/microsoft/azure/hooks/azure_batch.py |   1 +
 .../azure/hooks/azure_container_instance.py        |   1 +
 .../azure/hooks/azure_container_registry.py        |   1 +
 .../azure/hooks/azure_container_volume.py          |   1 +
 .../microsoft/azure/hooks/azure_cosmos.py          |   1 +
 .../microsoft/azure/hooks/azure_data_factory.py    |   1 +
 .../microsoft/azure/hooks/azure_data_lake.py       |   1 +
 .../microsoft/azure/hooks/azure_fileshare.py       |   1 +
 .../providers/microsoft/azure/hooks/base_azure.py  |   7 +-
 airflow/providers/microsoft/azure/hooks/batch.py   |  45 +-
 .../microsoft/azure/hooks/container_instance.py    |   2 +-
 .../microsoft/azure/hooks/container_registry.py    |   5 +-
 .../microsoft/azure/hooks/container_volume.py      |   8 +-
 airflow/providers/microsoft/azure/hooks/cosmos.py  |  42 +-
 .../microsoft/azure/hooks/data_factory.py          | 128 ++---
 .../providers/microsoft/azure/hooks/data_lake.py   |  13 +-
 .../providers/microsoft/azure/hooks/fileshare.py   |  13 +-
 airflow/providers/microsoft/azure/hooks/synapse.py |  15 +-
 airflow/providers/microsoft/azure/hooks/wasb.py    |  23 +-
 .../microsoft/azure/log/wasb_task_handler.py       |   7 +-
 .../providers/microsoft/azure/operators/adls.py    |   5 +-
 .../microsoft/azure/operators/adls_delete.py       |   1 +
 .../microsoft/azure/operators/adls_list.py         |   1 +
 airflow/providers/microsoft/azure/operators/adx.py |  10 +-
 airflow/providers/microsoft/azure/operators/asb.py |  86 ++--
 .../microsoft/azure/operators/azure_batch.py       |   1 +
 .../azure/operators/azure_container_instances.py   |   1 +
 .../microsoft/azure/operators/azure_cosmos.py      |   1 +
 .../providers/microsoft/azure/operators/batch.py   |  53 +-
 .../azure/operators/container_instances.py         |  39 +-
 .../providers/microsoft/azure/operators/cosmos.py  |   4 +-
 .../microsoft/azure/operators/data_factory.py      |  21 +-
 .../providers/microsoft/azure/operators/synapse.py |   3 +-
 .../microsoft/azure/operators/wasb_delete_blob.py  |   5 +-
 .../microsoft/azure/secrets/azure_key_vault.py     |   1 +
 .../providers/microsoft/azure/secrets/key_vault.py |  13 +-
 .../microsoft/azure/sensors/azure_cosmos.py        |   1 +
 .../providers/microsoft/azure/sensors/cosmos.py    |   4 +-
 .../microsoft/azure/sensors/data_factory.py        |   9 +-
 airflow/providers/microsoft/azure/sensors/wasb.py  |  13 +-
 .../microsoft/azure/transfers/azure_blob_to_gcs.py |  11 +-
 .../microsoft/azure/transfers/file_to_wasb.py      |   2 +-
 .../microsoft/azure/transfers/local_to_adls.py     |   8 +-
 .../microsoft/azure/transfers/local_to_wasb.py     |   9 +-
 .../azure/transfers/oracle_to_azure_data_lake.py   |   9 +-
 .../microsoft/azure/transfers/sftp_to_wasb.py      |  20 +-
 airflow/providers/microsoft/mssql/hooks/mssql.py   |   8 +-
 .../providers/microsoft/mssql/operators/mssql.py   |  16 +-
 airflow/providers/microsoft/psrp/hooks/psrp.py     |  17 +-
 airflow/providers/microsoft/psrp/operators/psrp.py |  19 +-
 airflow/providers/microsoft/winrm/hooks/winrm.py   |  25 +-
 .../providers/microsoft/winrm/operators/winrm.py   |  15 +-
 airflow/providers/mongo/hooks/mongo.py             |  41 +-
 airflow/providers/mongo/sensors/mongo.py           |   4 +-
 airflow/providers/mysql/hooks/mysql.py             |  13 +-
 airflow/providers/mysql/operators/mysql.py         |  12 +-
 .../providers/mysql/transfers/presto_to_mysql.py   |   8 +-
 airflow/providers/mysql/transfers/s3_to_mysql.py   |   7 +-
 .../providers/mysql/transfers/trino_to_mysql.py    |   8 +-
 .../providers/mysql/transfers/vertica_to_mysql.py  |   9 +-
 airflow/providers/neo4j/hooks/neo4j.py             |   2 +-
 airflow/providers/neo4j/operators/neo4j.py         |   8 +-
 airflow/providers/odbc/hooks/odbc.py               |  22 +-
 airflow/providers/openfaas/hooks/openfaas.py       |  11 +-
 airflow/providers/opsgenie/hooks/opsgenie.py       |  18 +-
 airflow/providers/opsgenie/hooks/opsgenie_alert.py |   1 +
 airflow/providers/opsgenie/operators/opsgenie.py   |  59 +--
 .../providers/opsgenie/operators/opsgenie_alert.py |   1 +
 airflow/providers/oracle/hooks/oracle.py           |  14 +-
 airflow/providers/oracle/operators/oracle.py       |  14 +-
 .../providers/oracle/transfers/oracle_to_oracle.py |   8 +-
 airflow/providers/pagerduty/hooks/pagerduty.py     |  26 +-
 .../providers/pagerduty/hooks/pagerduty_events.py  |  24 +-
 airflow/providers/papermill/operators/papermill.py |  20 +-
 airflow/providers/plexus/hooks/plexus.py           |   2 +
 airflow/providers/plexus/operators/job.py          |   7 +-
 airflow/providers/postgres/hooks/postgres.py       |  16 +-
 airflow/providers/postgres/operators/postgres.py   |  16 +-
 airflow/providers/presto/hooks/presto.py           |  20 +-
 .../providers/presto/transfers/gcs_to_presto.py    |  13 +-
 airflow/providers/qubole/hooks/qubole.py           |  17 +-
 airflow/providers/qubole/hooks/qubole_check.py     |   8 +-
 airflow/providers/qubole/operators/qubole.py       |  12 +-
 airflow/providers/qubole/operators/qubole_check.py |  15 +-
 airflow/providers/qubole/sensors/qubole.py         |   4 +-
 airflow/providers/redis/hooks/redis.py             |   3 +-
 airflow/providers/redis/operators/redis_publish.py |   4 +-
 airflow/providers/redis/sensors/redis_key.py       |   4 +-
 airflow/providers/redis/sensors/redis_pub_sub.py   |   7 +-
 airflow/providers/salesforce/hooks/salesforce.py   |  23 +-
 airflow/providers/salesforce/operators/bulk.py     |   4 +-
 .../salesforce/operators/salesforce_apex_rest.py   |   4 +-
 airflow/providers/samba/hooks/samba.py             |   6 +-
 airflow/providers/segment/hooks/segment.py         |   3 +-
 .../segment/operators/segment_track_event.py       |   8 +-
 airflow/providers/sendgrid/utils/emailer.py        |  11 +-
 airflow/providers/sftp/hooks/sftp.py               |  38 +-
 airflow/providers/sftp/operators/sftp.py           |  14 +-
 airflow/providers/sftp/sensors/sftp.py             |  12 +-
 .../providers/singularity/operators/singularity.py |  25 +-
 airflow/providers/slack/hooks/slack.py             |  41 +-
 airflow/providers/slack/hooks/slack_webhook.py     |   6 +-
 airflow/providers/slack/operators/slack.py         |  30 +-
 airflow/providers/slack/operators/slack_webhook.py |  25 +-
 airflow/providers/slack/transfers/sql_to_slack.py  |  16 +-
 airflow/providers/slack/utils/__init__.py          |   5 +-
 airflow/providers/snowflake/hooks/snowflake.py     |  22 +-
 airflow/providers/snowflake/operators/snowflake.py |  72 +--
 .../snowflake/transfers/copy_into_snowflake.py     |  29 +-
 .../snowflake/transfers/s3_to_snowflake.py         |  25 +-
 .../snowflake/transfers/snowflake_to_slack.py      |  15 +-
 airflow/providers/snowflake/utils/common.py        |   1 +
 airflow/providers/sqlite/hooks/sqlite.py           |   1 +
 airflow/providers/sqlite/operators/sqlite.py       |   8 +-
 airflow/providers/ssh/hooks/ssh.py                 |  46 +-
 airflow/providers/ssh/operators/ssh.py             |  31 +-
 airflow/providers/tableau/hooks/tableau.py         |   8 +-
 airflow/providers/tableau/operators/tableau.py     |   8 +-
 .../tableau/operators/tableau_refresh_workbook.py  |   8 +-
 airflow/providers/tableau/sensors/tableau.py       |   8 +-
 .../tableau/sensors/tableau_job_status.py          |   2 +-
 airflow/providers/tabular/hooks/tabular.py         |   7 +-
 airflow/providers/telegram/hooks/telegram.py       |  12 +-
 airflow/providers/telegram/operators/telegram.py   |  12 +-
 airflow/providers/trino/hooks/trino.py             |  24 +-
 airflow/providers/trino/operators/trino.py         |  13 +-
 airflow/providers/trino/transfers/gcs_to_trino.py  |  13 +-
 airflow/providers/vertica/hooks/vertica.py         |   2 +-
 airflow/providers/vertica/operators/vertica.py     |   8 +-
 airflow/providers/yandex/hooks/yandex.py           |  19 +-
 .../providers/yandex/hooks/yandexcloud_dataproc.py |   2 +-
 .../yandex/operators/yandexcloud_dataproc.py       | 166 ++++---
 airflow/providers/zendesk/hooks/zendesk.py         |  14 +-
 tests/charts/conftest.py                           |   1 +
 tests/charts/helm_template_generator.py            |   5 +-
 tests/charts/test_airflow_common.py                |   1 +
 tests/charts/test_annotations.py                   |   1 +
 tests/charts/test_basic_helm_chart.py              |   6 +-
 tests/charts/test_celery_kubernetes_executor.py    |   1 +
 tests/charts/test_chart_quality.py                 |   2 +
 tests/charts/test_cleanup_pods.py                  |   1 +
 tests/charts/test_configmap.py                     |   1 +
 tests/charts/test_create_user_job.py               |   1 +
 tests/charts/test_dag_processor.py                 |   1 +
 tests/charts/test_dags_persistent_volume_claim.py  |   1 +
 tests/charts/test_elasticsearch_secret.py          |   1 +
 tests/charts/test_extra_configmaps_secrets.py      |   1 +
 tests/charts/test_extra_env_env_from.py            |   7 +-
 tests/charts/test_flower.py                        |   1 +
 tests/charts/test_git_sync_scheduler.py            |   1 +
 tests/charts/test_git_sync_triggerer.py            |   1 +
 tests/charts/test_git_sync_webserver.py            |   1 +
 tests/charts/test_git_sync_worker.py               |   1 +
 tests/charts/test_ingress_flower.py                |   1 +
 tests/charts/test_ingress_web.py                   |   1 +
 tests/charts/test_keda.py                          |   2 +
 tests/charts/test_kerberos.py                      |   1 +
 tests/charts/test_limit_ranges.py                  |   1 +
 tests/charts/test_logs_persistent_volume_claim.py  |   1 +
 tests/charts/test_metadata_connection_secret.py    |   1 +
 tests/charts/test_migrate_database_job.py          |   1 +
 tests/charts/test_pdb_pgbouncer.py                 |   1 +
 tests/charts/test_pdb_scheduler.py                 |   1 +
 tests/charts/test_pdb_webserver.py                 |   1 +
 tests/charts/test_pgbouncer.py                     |   1 +
 tests/charts/test_pod_launcher_role.py             |   1 +
 tests/charts/test_pod_template_file.py             |   2 +
 tests/charts/test_rbac.py                          |   1 +
 tests/charts/test_rbac_pod_log_reader.py           |   2 +-
 tests/charts/test_redis.py                         |   5 +-
 tests/charts/test_resource_quota.py                |   1 +
 .../test_result_backend_connection_secret.py       |   4 +-
 tests/charts/test_scc_rolebinding.py               |   1 +
 tests/charts/test_scheduler.py                     |   1 +
 tests/charts/test_security_context.py              |   1 +
 tests/charts/test_statsd.py                        |   1 +
 tests/charts/test_triggerer.py                     |   1 +
 tests/charts/test_webserver.py                     |   1 +
 tests/charts/test_worker.py                        |   1 +
 tests/providers/airbyte/hooks/test_airbyte.py      |   3 +-
 tests/providers/airbyte/operators/test_airbyte.py  |   3 +-
 tests/providers/airbyte/sensors/test_airbyte.py    |   1 +
 tests/providers/alibaba/cloud/hooks/test_oss.py    |   3 +-
 .../alibaba/cloud/log/test_oss_task_handler.py     |   3 +-
 .../providers/alibaba/cloud/operators/test_oss.py  |   3 +-
 .../alibaba/cloud/sensors/test_oss_key.py          |   2 +-
 tests/providers/alibaba/cloud/utils/oss_mock.py    |   2 +
 tests/providers/alibaba/cloud/utils/test_utils.py  |   1 +
 tests/providers/amazon/aws/hooks/conftest.py       |   2 +-
 tests/providers/amazon/aws/hooks/test_appflow.py   |   1 +
 tests/providers/amazon/aws/hooks/test_athena.py    |   3 +-
 tests/providers/amazon/aws/hooks/test_base_aws.py  |   3 +-
 .../amazon/aws/hooks/test_batch_client.py          |   2 +-
 .../amazon/aws/hooks/test_batch_waiters.py         |  35 +-
 .../amazon/aws/hooks/test_cloud_formation.py       |   3 +-
 tests/providers/amazon/aws/hooks/test_datasync.py  |   3 +-
 tests/providers/amazon/aws/hooks/test_dms_task.py  |  15 +-
 tests/providers/amazon/aws/hooks/test_dynamodb.py  |   2 +-
 tests/providers/amazon/aws/hooks/test_ec2.py       |   2 +-
 tests/providers/amazon/aws/hooks/test_ecs.py       |   1 +
 tests/providers/amazon/aws/hooks/test_eks.py       | 132 ++---
 .../hooks/test_elasticache_replication_group.py    |   1 +
 tests/providers/amazon/aws/hooks/test_emr.py       |   2 +-
 .../amazon/aws/hooks/test_emr_containers.py        |   2 +-
 .../amazon/aws/hooks/test_emr_serverless.py        |   2 +-
 tests/providers/amazon/aws/hooks/test_glacier.py   |   1 +
 tests/providers/amazon/aws/hooks/test_glue.py      |   2 +
 .../amazon/aws/hooks/test_glue_catalog.py          |   1 +
 .../amazon/aws/hooks/test_glue_crawler.py          |   2 +
 tests/providers/amazon/aws/hooks/test_kinesis.py   |   1 +
 .../amazon/aws/hooks/test_lambda_function.py       |   3 +-
 tests/providers/amazon/aws/hooks/test_logs.py      |   3 +-
 .../providers/amazon/aws/hooks/test_quicksight.py  |   1 +
 tests/providers/amazon/aws/hooks/test_rds.py       |   2 +-
 .../amazon/aws/hooks/test_redshift_cluster.py      |   1 +
 .../amazon/aws/hooks/test_redshift_data.py         |   3 +-
 .../amazon/aws/hooks/test_redshift_sql.py          |   1 +
 tests/providers/amazon/aws/hooks/test_s3.py        |   3 +-
 tests/providers/amazon/aws/hooks/test_sagemaker.py |   3 +-
 .../amazon/aws/hooks/test_secrets_manager.py       |   2 +-
 tests/providers/amazon/aws/hooks/test_ses.py       |   1 +
 tests/providers/amazon/aws/hooks/test_sns.py       |   2 +-
 tests/providers/amazon/aws/hooks/test_sqs.py       |   2 +-
 .../amazon/aws/hooks/test_step_function.py         |   2 +-
 tests/providers/amazon/aws/hooks/test_sts.py       |   1 +
 tests/providers/amazon/aws/links/conftest.py       |   1 +
 tests/providers/amazon/aws/links/test_base.py      |   1 +
 tests/providers/amazon/aws/links/test_links.py     |   1 +
 .../amazon/aws/log/test_cloudwatch_task_handler.py |   2 +
 .../amazon/aws/log/test_s3_task_handler.py         |   1 +
 .../providers/amazon/aws/operators/test_appflow.py |   1 +
 .../providers/amazon/aws/operators/test_athena.py  |   1 +
 tests/providers/amazon/aws/operators/test_batch.py |   3 +-
 .../amazon/aws/operators/test_cloud_formation.py   |   2 +
 .../amazon/aws/operators/test_datasync.py          |   2 +
 .../amazon/aws/operators/test_dms_create_task.py   |   2 +
 .../amazon/aws/operators/test_dms_delete_task.py   |   2 +
 .../aws/operators/test_dms_describe_tasks.py       |   2 +
 .../amazon/aws/operators/test_dms_start_task.py    |   2 +
 .../amazon/aws/operators/test_dms_stop_task.py     |   2 +
 tests/providers/amazon/aws/operators/test_ec2.py   |   2 +-
 tests/providers/amazon/aws/operators/test_ecs.py   |   2 +-
 tests/providers/amazon/aws/operators/test_eks.py   |  12 +-
 .../amazon/aws/operators/test_emr_add_steps.py     |   1 +
 .../amazon/aws/operators/test_emr_containers.py    |   1 +
 .../aws/operators/test_emr_create_job_flow.py      |   2 +-
 .../aws/operators/test_emr_modify_cluster.py       |   1 +
 .../amazon/aws/operators/test_emr_serverless.py    |   1 +
 .../aws/operators/test_emr_terminate_job_flow.py   |   1 +
 .../amazon/aws/operators/test_example_s3_bucket.py |   2 +
 .../providers/amazon/aws/operators/test_glacier.py |   1 +
 tests/providers/amazon/aws/operators/test_glue.py  |   1 +
 .../amazon/aws/operators/test_glue_crawler.py      |   1 +
 .../providers/amazon/aws/operators/test_lambda.py  |   1 +
 .../amazon/aws/operators/test_quicksight.py        |   1 +
 tests/providers/amazon/aws/operators/test_rds.py   |   3 +-
 .../amazon/aws/operators/test_redshift_cluster.py  |   1 +
 .../amazon/aws/operators/test_redshift_data.py     |   1 +
 .../amazon/aws/operators/test_redshift_sql.py      |   1 +
 .../amazon/aws/operators/test_s3_bucket.py         |   2 +
 .../amazon/aws/operators/test_s3_bucket_tagging.py |   2 +
 .../amazon/aws/operators/test_s3_file_transform.py |   2 +-
 .../providers/amazon/aws/operators/test_s3_list.py |   1 +
 .../amazon/aws/operators/test_s3_list_prefixes.py  |   1 +
 .../amazon/aws/operators/test_s3_object.py         |   2 +
 .../amazon/aws/operators/test_sagemaker_base.py    |   9 +-
 .../aws/operators/test_sagemaker_endpoint.py       |  12 +-
 .../operators/test_sagemaker_endpoint_config.py    |   6 +-
 .../amazon/aws/operators/test_sagemaker_model.py   |   6 +-
 .../aws/operators/test_sagemaker_processing.py     |  10 +-
 .../aws/operators/test_sagemaker_training.py       |   4 +-
 .../aws/operators/test_sagemaker_transform.py      |  10 +-
 .../amazon/aws/operators/test_sagemaker_tuning.py  |   6 +-
 tests/providers/amazon/aws/operators/test_sns.py   |   2 +-
 tests/providers/amazon/aws/operators/test_sqs.py   |   2 +-
 .../amazon/aws/operators/test_step_function.py     |   1 +
 .../amazon/aws/secrets/test_secrets_manager.py     |   1 +
 .../amazon/aws/secrets/test_systems_manager.py     |   1 +
 tests/providers/amazon/aws/sensors/test_athena.py  |   1 +
 tests/providers/amazon/aws/sensors/test_batch.py   |   1 +
 .../amazon/aws/sensors/test_cloud_formation.py     |   2 +
 .../providers/amazon/aws/sensors/test_dms_task.py  |   1 +
 tests/providers/amazon/aws/sensors/test_ec2.py     |   2 +-
 tests/providers/amazon/aws/sensors/test_eks.py     |   3 +-
 .../providers/amazon/aws/sensors/test_emr_base.py  |   1 +
 .../amazon/aws/sensors/test_emr_containers.py      |   1 +
 .../amazon/aws/sensors/test_emr_job_flow.py        |   1 +
 .../providers/amazon/aws/sensors/test_emr_step.py  |   1 +
 tests/providers/amazon/aws/sensors/test_glacier.py |   1 +
 tests/providers/amazon/aws/sensors/test_glue.py    |   1 +
 .../aws/sensors/test_glue_catalog_partition.py     |   1 +
 .../amazon/aws/sensors/test_glue_crawler.py        |   1 +
 .../amazon/aws/sensors/test_quicksight.py          |   1 +
 tests/providers/amazon/aws/sensors/test_rds.py     |   1 +
 .../amazon/aws/sensors/test_redshift_cluster.py    |   1 +
 tests/providers/amazon/aws/sensors/test_s3_key.py  |   4 +-
 .../amazon/aws/sensors/test_s3_keys_unchanged.py   |   1 +
 .../amazon/aws/sensors/test_sagemaker_base.py      |   1 +
 .../amazon/aws/sensors/test_sagemaker_endpoint.py  |   1 +
 .../amazon/aws/sensors/test_sagemaker_training.py  |   1 +
 .../amazon/aws/sensors/test_sagemaker_transform.py |   1 +
 .../amazon/aws/sensors/test_sagemaker_tuning.py    |   1 +
 tests/providers/amazon/aws/sensors/test_sqs.py     |   2 +-
 .../amazon/aws/sensors/test_step_function.py       |   1 +
 .../amazon/aws/system/utils/test_helpers.py        |   3 +-
 .../amazon/aws/transfers/test_dynamodb_to_s3.py    |   3 +-
 .../amazon/aws/transfers/test_exasol_to_s3.py      |   2 +
 .../amazon/aws/transfers/test_ftp_to_s3.py         |   2 +
 .../amazon/aws/transfers/test_gcs_to_s3.py         |   1 +
 .../amazon/aws/transfers/test_glacier_to_gcs.py    |   2 +
 .../amazon/aws/transfers/test_google_api_to_s3.py  |   3 +-
 .../amazon/aws/transfers/test_hive_to_dynamodb.py  |   2 +-
 .../aws/transfers/test_imap_attachment_to_s3.py    |   1 +
 .../amazon/aws/transfers/test_local_to_s3.py       |   2 +-
 .../amazon/aws/transfers/test_mongo_to_s3.py       |   2 +
 .../amazon/aws/transfers/test_redshift_to_s3.py    |   2 +-
 .../amazon/aws/transfers/test_s3_to_ftp.py         |   2 +
 .../amazon/aws/transfers/test_s3_to_redshift.py    |   2 +-
 .../amazon/aws/transfers/test_s3_to_sftp.py        |   2 +
 .../amazon/aws/transfers/test_salesforce_to_s3.py  |   1 +
 .../amazon/aws/transfers/test_sftp_to_s3.py        |   1 +
 .../amazon/aws/transfers/test_sql_to_s3.py         |   3 +-
 .../amazon/aws/utils/eks_test_constants.py         |  60 +--
 tests/providers/amazon/aws/utils/eks_test_utils.py |  33 +-
 .../providers/amazon/aws/utils/links_test_utils.py |   1 +
 .../amazon/aws/utils/test_connection_wrapper.py    |   4 +-
 .../amazon/aws/utils/test_eks_get_token.py         |   1 +
 tests/providers/amazon/aws/utils/test_emailer.py   |   3 +-
 tests/providers/amazon/aws/utils/test_redshift.py  |   2 +-
 tests/providers/amazon/aws/utils/test_utils.py     |   1 +
 tests/providers/apache/beam/hooks/test_beam.py     |   2 +-
 tests/providers/apache/beam/operators/test_beam.py |   3 +-
 .../apache/cassandra/hooks/test_cassandra.py       |   1 +
 .../apache/cassandra/sensors/test_record.py        |   1 +
 .../apache/cassandra/sensors/test_table.py         |   1 +
 tests/providers/apache/drill/hooks/test_drill.py   |   2 +-
 .../providers/apache/drill/operators/test_drill.py |   1 +
 tests/providers/apache/druid/hooks/test_druid.py   |   2 +-
 .../providers/apache/druid/operators/test_druid.py |   3 +-
 .../apache/druid/transfers/test_hive_to_druid.py   |   2 +-
 tests/providers/apache/hdfs/hooks/test_hdfs.py     |   1 +
 tests/providers/apache/hdfs/hooks/test_webhdfs.py  |   1 +
 tests/providers/apache/hdfs/sensors/test_hdfs.py   |   2 +
 .../providers/apache/hdfs/sensors/test_web_hdfs.py |   1 +
 tests/providers/apache/hive/__init__.py            |   4 +-
 tests/providers/apache/hive/hooks/test_hive.py     |   2 +-
 tests/providers/apache/hive/operators/test_hive.py |   1 +
 .../apache/hive/operators/test_hive_stats.py       |   1 +
 tests/providers/apache/hive/sensors/test_hdfs.py   |   1 +
 .../apache/hive/sensors/test_hive_partition.py     |   1 +
 .../hive/sensors/test_metastore_partition.py       |   1 +
 .../hive/sensors/test_named_hive_partition.py      |   2 +
 .../apache/hive/transfers/test_hive_to_mysql.py    |   2 +
 .../apache/hive/transfers/test_hive_to_samba.py    |   2 +
 .../apache/hive/transfers/test_mssql_to_hive.py    |   2 +-
 .../apache/hive/transfers/test_mysql_to_hive.py    |   1 +
 .../apache/hive/transfers/test_s3_to_hive.py       |   1 +
 .../apache/hive/transfers/test_vertica_to_hive.py  |   1 +
 tests/providers/apache/kylin/hooks/test_kylin.py   |   2 +-
 .../apache/kylin/operators/test_kylin_cube.py      |   1 +
 tests/providers/apache/livy/hooks/test_livy.py     |   1 +
 tests/providers/apache/livy/operators/test_livy.py |   2 +-
 tests/providers/apache/livy/sensors/test_livy.py   |   2 +-
 tests/providers/apache/pig/hooks/test_pig.py       |   1 +
 tests/providers/apache/pig/operators/test_pig.py   |   1 +
 tests/providers/apache/pinot/hooks/test_pinot.py   |   2 +-
 .../apache/spark/hooks/test_spark_jdbc.py          |   3 +-
 .../apache/spark/hooks/test_spark_jdbc_script.py   |   2 +
 .../providers/apache/spark/hooks/test_spark_sql.py |   1 +
 .../apache/spark/hooks/test_spark_submit.py        |   1 +
 .../apache/spark/operators/test_spark_jdbc.py      |   2 +-
 .../apache/spark/operators/test_spark_sql.py       |   2 +-
 .../apache/spark/operators/test_spark_submit.py    |   2 +-
 tests/providers/apache/sqoop/hooks/test_sqoop.py   |   2 +-
 .../providers/apache/sqoop/operators/test_sqoop.py |   2 +-
 tests/providers/arangodb/hooks/test_arangodb.py    |   1 +
 .../providers/arangodb/operators/test_arangodb.py  |   1 +
 tests/providers/arangodb/sensors/test_arangodb.py  |   2 +-
 tests/providers/asana/hooks/test_asana.py          |   3 +-
 .../providers/asana/operators/test_asana_tasks.py  |   1 +
 tests/providers/atlassian/jira/hooks/test_jira.py  |   2 +-
 .../atlassian/jira/operators/test_jira.py          |   2 +-
 .../providers/atlassian/jira/sensors/test_jira.py  |   2 +-
 .../providers/celery/sensors/test_celery_queue.py  |   1 +
 tests/providers/cloudant/hooks/test_cloudant.py    |   2 +
 .../cncf/kubernetes/decorators/test_kubernetes.py  |   1 +
 .../cncf/kubernetes/hooks/test_kubernetes.py       |   2 +-
 .../kubernetes/operators/test_kubernetes_pod.py    |   2 +
 .../kubernetes/operators/test_spark_kubernetes.py  |   2 +-
 .../kubernetes/sensors/test_spark_kubernetes.py    |   2 +-
 .../cncf/kubernetes/utils/test_pod_manager.py      |   2 +
 tests/providers/common/sql/hooks/test_dbapi.py     |   2 +-
 tests/providers/common/sql/hooks/test_sqlparse.py  |   1 +
 tests/providers/common/sql/operators/test_sql.py   |   2 +
 tests/providers/common/sql/sensors/test_sql.py     |   2 +
 .../providers/databricks/hooks/test_databricks.py  |   2 +-
 .../databricks/hooks/test_databricks_sql.py        |   2 +-
 .../databricks/operators/test_databricks.py        |   3 +-
 .../databricks/operators/test_databricks_repos.py  |   3 +-
 .../databricks/operators/test_databricks_sql.py    |   3 +-
 .../databricks/triggers/test_databricks.py         |   2 +-
 tests/providers/databricks/utils/databricks.py     |   2 +-
 tests/providers/datadog/hooks/test_datadog.py      |   3 +-
 tests/providers/datadog/sensors/test_datadog.py    |   1 +
 tests/providers/dbt/cloud/hooks/test_dbt_cloud.py  |   1 +
 .../dbt/cloud/operators/test_dbt_cloud.py          |   1 +
 .../providers/dbt/cloud/sensors/test_dbt_cloud.py  |   1 +
 tests/providers/dingding/hooks/test_dingding.py    |   1 +
 .../providers/dingding/operators/test_dingding.py  |   1 +
 .../discord/hooks/test_discord_webhook.py          |   3 +-
 .../discord/operators/test_discord_webhook.py      |   3 +-
 tests/providers/docker/decorators/test_docker.py   |   1 +
 tests/providers/docker/hooks/test_docker.py        |   1 +
 tests/providers/docker/operators/test_docker.py    |   2 +
 .../docker/operators/test_docker_swarm.py          |   1 +
 .../elasticsearch/hooks/test_elasticsearch.py      |   2 +-
 .../elasticsearch/log/elasticmock/__init__.py      |   1 +
 .../log/elasticmock/fake_elasticsearch.py          |  17 +-
 .../log/elasticmock/utilities/__init__.py          |   1 +
 .../elasticsearch/log/test_es_task_handler.py      |   2 +
 tests/providers/exasol/hooks/test_exasol.py        |   2 +-
 tests/providers/exasol/operators/test_exasol.py    |   1 +
 tests/providers/facebook/ads/hooks/test_ads.py     |   1 +
 tests/providers/ftp/hooks/test_ftp.py              |   2 +
 tests/providers/ftp/sensors/test_ftp.py            |   1 +
 tests/providers/github/hooks/test_github.py        |   2 +-
 tests/providers/github/operators/test_github.py    |   2 +-
 tests/providers/github/sensors/test_github.py      |   2 +-
 tests/providers/google/ads/hooks/test_ads.py       |   1 +
 tests/providers/google/ads/operators/test_ads.py   |   2 +
 .../google/ads/transfers/test_ads_to_gcs.py        |   1 +
 .../_internal_client/test_secret_manager_client.py |   1 +
 tests/providers/google/cloud/hooks/test_automl.py  |   3 +-
 .../providers/google/cloud/hooks/test_bigquery.py  |   2 +-
 .../google/cloud/hooks/test_bigquery_dts.py        |   1 +
 .../google/cloud/hooks/test_bigquery_system.py     |   1 +
 .../providers/google/cloud/hooks/test_bigtable.py  |   1 +
 .../google/cloud/hooks/test_cloud_build.py         |   3 +-
 .../google/cloud/hooks/test_cloud_composer.py      |   2 +-
 .../google/cloud/hooks/test_cloud_memorystore.py   |   2 +
 .../providers/google/cloud/hooks/test_cloud_sql.py |   2 +-
 .../hooks/test_cloud_storage_transfer_service.py   |   3 +-
 tests/providers/google/cloud/hooks/test_compute.py |   2 +-
 .../google/cloud/hooks/test_compute_ssh.py         |   2 +
 .../google/cloud/hooks/test_compute_ssh_system.py  |   2 +
 .../google/cloud/hooks/test_datacatalog.py         |  21 +-
 .../providers/google/cloud/hooks/test_dataflow.py  |   8 +-
 .../providers/google/cloud/hooks/test_dataform.py  |   2 +-
 .../google/cloud/hooks/test_datafusion.py          |   1 +
 .../providers/google/cloud/hooks/test_dataplex.py  |   1 +
 .../providers/google/cloud/hooks/test_dataprep.py  |   2 +
 .../providers/google/cloud/hooks/test_dataproc.py  |   3 +-
 .../google/cloud/hooks/test_dataproc_metastore.py  |   2 +-
 .../providers/google/cloud/hooks/test_datastore.py |   2 +-
 tests/providers/google/cloud/hooks/test_dlp.py     |   3 +-
 .../providers/google/cloud/hooks/test_functions.py |   1 +
 tests/providers/google/cloud/hooks/test_gcs.py     |   1 +
 tests/providers/google/cloud/hooks/test_gdm.py     |   1 +
 tests/providers/google/cloud/hooks/test_kms.py     |   1 +
 .../google/cloud/hooks/test_kms_system.py          |   1 +
 .../google/cloud/hooks/test_kubernetes_engine.py   |   3 +-
 .../google/cloud/hooks/test_life_sciences.py       |   2 +
 tests/providers/google/cloud/hooks/test_looker.py  |   3 +-
 .../providers/google/cloud/hooks/test_mlengine.py  |   2 +
 .../google/cloud/hooks/test_natural_language.py    |   3 +-
 .../providers/google/cloud/hooks/test_os_login.py  |  10 +-
 tests/providers/google/cloud/hooks/test_pubsub.py  |   4 +-
 .../google/cloud/hooks/test_secret_manager.py      |   2 +
 .../cloud/hooks/test_secret_manager_system.py      |   1 +
 tests/providers/google/cloud/hooks/test_spanner.py |   1 +
 .../google/cloud/hooks/test_speech_to_text.py      |   2 +-
 .../google/cloud/hooks/test_stackdriver.py         |   1 +
 tests/providers/google/cloud/hooks/test_tasks.py   |   3 +-
 .../google/cloud/hooks/test_text_to_speech.py      |   2 +-
 .../providers/google/cloud/hooks/test_translate.py |   1 +
 .../google/cloud/hooks/test_video_intelligence.py  |   3 +-
 tests/providers/google/cloud/hooks/test_vision.py  |   2 +
 .../providers/google/cloud/hooks/test_workflows.py |   1 +
 .../google/cloud/hooks/vertex_ai/test_auto_ml.py   |   2 +-
 .../hooks/vertex_ai/test_batch_prediction_job.py   |   2 +-
 .../cloud/hooks/vertex_ai/test_custom_job.py       |   2 +-
 .../google/cloud/hooks/vertex_ai/test_dataset.py   |   2 +-
 .../cloud/hooks/vertex_ai/test_endpoint_service.py |   2 +-
 .../vertex_ai/test_hyperparameter_tuning_job.py    |   2 +-
 .../cloud/hooks/vertex_ai/test_model_service.py    |   2 +-
 .../google/cloud/log/test_gcs_task_handler.py      |   2 +
 .../cloud/log/test_gcs_task_handler_system.py      |   2 +
 .../cloud/log/test_stackdriver_task_handler.py     |   1 +
 .../log/test_stackdriver_task_handler_system.py    |   2 +
 .../google/cloud/operators/test_automl.py          |   3 +-
 .../google/cloud/operators/test_automl_system.py   |   2 +
 .../google/cloud/operators/test_bigquery.py        |   2 +
 .../google/cloud/operators/test_bigquery_dts.py    |   2 +
 .../cloud/operators/test_bigquery_dts_system.py    |   2 +
 .../google/cloud/operators/test_bigtable.py        |   1 +
 .../google/cloud/operators/test_bigtable_system.py |   1 +
 .../google/cloud/operators/test_cloud_build.py     |   6 +-
 .../google/cloud/operators/test_cloud_composer.py  |   2 +
 .../cloud/operators/test_cloud_composer_system.py  |   2 +
 .../cloud/operators/test_cloud_memorystore.py      |   2 +
 .../google/cloud/operators/test_cloud_sql.py       |   2 +-
 .../test_cloud_storage_transfer_service.py         |   2 +
 .../test_cloud_storage_transfer_service_system.py  |   2 +
 .../google/cloud/operators/test_compute.py         |   2 +-
 .../google/cloud/operators/test_compute_system.py  |   2 +
 .../cloud/operators/test_compute_system_helper.py  |   2 +
 .../google/cloud/operators/test_datacatalog.py     |  23 +-
 .../google/cloud/operators/test_dataflow.py        |   3 +-
 .../google/cloud/operators/test_dataflow_system.py |   2 +
 .../google/cloud/operators/test_dataform.py        |   1 +
 .../google/cloud/operators/test_datafusion.py      |   1 +
 .../cloud/operators/test_datafusion_system.py      |   2 +
 .../google/cloud/operators/test_dataplex.py        |   1 +
 .../google/cloud/operators/test_dataplex_system.py |   2 +
 .../google/cloud/operators/test_dataprep.py        |   2 +
 .../google/cloud/operators/test_dataprep_system.py |   2 +
 .../google/cloud/operators/test_dataproc.py        |   1 +
 .../cloud/operators/test_dataproc_metastore.py     |   5 +-
 .../operators/test_dataproc_metastore_system.py    |   2 +
 .../google/cloud/operators/test_datastore.py       |   1 +
 .../cloud/operators/test_datastore_system.py       |   2 +
 tests/providers/google/cloud/operators/test_dlp.py |   3 +-
 .../google/cloud/operators/test_dlp_system.py      |   4 +-
 .../google/cloud/operators/test_functions.py       |   1 +
 .../cloud/operators/test_functions_system.py       |   2 +
 tests/providers/google/cloud/operators/test_gcs.py |   1 +
 .../cloud/operators/test_kubernetes_engine.py      |   2 +
 .../google/cloud/operators/test_life_sciences.py   |   1 +
 .../google/cloud/operators/test_looker.py          |   1 +
 .../google/cloud/operators/test_mlengine.py        |   1 +
 .../google/cloud/operators/test_mlengine_system.py |   2 +
 .../google/cloud/operators/test_mlengine_utils.py  |   1 +
 .../cloud/operators/test_natural_language.py       |   3 +-
 .../google/cloud/operators/test_pubsub.py          |   7 +-
 .../google/cloud/operators/test_spanner.py         |   2 +
 .../google/cloud/operators/test_speech_to_text.py  |   1 +
 .../google/cloud/operators/test_stackdriver.py     |   1 +
 .../providers/google/cloud/operators/test_tasks.py |   1 +
 .../google/cloud/operators/test_text_to_speech.py  |   1 +
 .../google/cloud/operators/test_translate.py       |   1 +
 .../cloud/operators/test_translate_speech.py       |   1 +
 .../google/cloud/operators/test_vertex_ai.py       |   8 +-
 .../cloud/operators/test_vertex_ai_system.py       |   2 +
 .../cloud/operators/test_video_intelligence.py     |   1 +
 .../google/cloud/operators/test_vision.py          |   1 +
 .../google/cloud/operators/test_vision_system.py   |   2 +
 .../google/cloud/operators/test_workflows.py       |   2 +
 .../google/cloud/secrets/test_secret_manager.py    |   1 +
 .../cloud/secrets/test_secret_manager_system.py    |   1 +
 .../google/cloud/sensors/test_bigquery.py          |   1 +
 .../google/cloud/sensors/test_bigquery_dts.py      |   1 +
 .../google/cloud/sensors/test_bigtable.py          |   1 +
 .../sensors/test_cloud_storage_transfer_service.py |   2 +
 .../google/cloud/sensors/test_dataflow.py          |   1 +
 .../google/cloud/sensors/test_datafusion.py        |   1 +
 .../google/cloud/sensors/test_dataplex.py          |   1 +
 .../google/cloud/sensors/test_dataproc.py          |   1 +
 tests/providers/google/cloud/sensors/test_gcs.py   |   2 +
 .../providers/google/cloud/sensors/test_looker.py  |   1 +
 .../providers/google/cloud/sensors/test_pubsub.py  |   7 +-
 tests/providers/google/cloud/sensors/test_tasks.py |   2 +
 .../google/cloud/sensors/test_workflows.py         |   1 +
 .../google/cloud/transfers/test_adls_to_gcs.py     |   1 +
 .../cloud/transfers/test_azure_fileshare_to_gcs.py |   1 +
 .../cloud/transfers/test_bigquery_to_bigquery.py   |   1 +
 .../google/cloud/transfers/test_bigquery_to_gcs.py |   1 +
 .../cloud/transfers/test_bigquery_to_mssql.py      |   2 +
 .../cloud/transfers/test_bigquery_to_mysql.py      |   2 +
 .../google/cloud/transfers/test_calendar_to_gcs.py |   1 +
 .../cloud/transfers/test_cassandra_to_gcs.py       |   1 +
 .../cloud/transfers/test_facebook_ads_to_gcs.py    |   2 +
 .../transfers/test_facebook_ads_to_gcs_system.py   |   2 +
 .../google/cloud/transfers/test_gcs_to_bigquery.py |   1 +
 .../google/cloud/transfers/test_gcs_to_gcs.py      |   1 +
 .../google/cloud/transfers/test_gcs_to_local.py    |   1 +
 .../google/cloud/transfers/test_gcs_to_sftp.py     |   1 +
 .../google/cloud/transfers/test_gdrive_to_gcs.py   |   1 +
 .../cloud/transfers/test_gdrive_to_gcs_system.py   |   2 +
 .../google/cloud/transfers/test_gdrive_to_local.py |   2 +
 .../google/cloud/transfers/test_local_to_gcs.py    |   2 +-
 .../google/cloud/transfers/test_mssql_to_gcs.py    |   1 +
 .../google/cloud/transfers/test_mysql_to_gcs.py    |   1 +
 .../google/cloud/transfers/test_oracle_to_gcs.py   |   1 +
 .../google/cloud/transfers/test_postgres_to_gcs.py |   2 +
 .../cloud/transfers/test_postgres_to_gcs_system.py |   2 +
 .../google/cloud/transfers/test_presto_to_gcs.py   |   2 +
 .../cloud/transfers/test_presto_to_gcs_system.py   |   2 +
 .../google/cloud/transfers/test_s3_to_gcs.py       |   1 +
 .../cloud/transfers/test_salesforce_to_gcs.py      |   1 +
 .../transfers/test_salesforce_to_gcs_system.py     |   1 +
 .../google/cloud/transfers/test_sftp_to_gcs.py     |   1 +
 .../cloud/transfers/test_sftp_to_gcs_system.py     |   2 +
 .../google/cloud/transfers/test_sheets_to_gcs.py   |   1 +
 .../google/cloud/transfers/test_sql_to_gcs.py      |   4 +-
 .../google/cloud/transfers/test_trino_to_gcs.py    |   2 +
 .../google/cloud/triggers/test_bigquery.py         |   7 +-
 tests/providers/google/cloud/utils/__init__.py     |   1 -
 .../providers/google/cloud/utils/base_gcp_mock.py  |   2 +
 .../google/cloud/utils/gcp_authenticator.py        |   4 +-
 .../cloud/utils/test_credentials_provider.py       |   1 +
 .../google/cloud/utils/test_field_sanitizer.py     |   1 +
 .../google/cloud/utils/test_field_validator.py     |   1 +
 tests/providers/google/cloud/utils/test_helpers.py |   1 +
 .../cloud/utils/test_mlengine_operator_utils.py    |   1 +
 .../utils/test_mlengine_prediction_summary.py      |   1 +
 .../common/auth_backend/test_google_openid.py      |   1 +
 .../google/common/hooks/test_base_google.py        |   3 +-
 .../google/common/hooks/test_discovery_api.py      |   3 +-
 .../common/utils/test_id_token_credentials.py      |   1 +
 .../google/firebase/hooks/test_firestore.py        |   3 +-
 .../google/firebase/operators/test_firestore.py    |   1 +
 .../providers/google/leveldb/hooks/test_leveldb.py |   2 +
 .../google/leveldb/operators/test_leveldb.py       |   3 +-
 .../marketing_platform/hooks/test_analytics.py     |   2 +
 .../hooks/test_campaign_manager.py                 |   2 +
 .../marketing_platform/hooks/test_display_video.py |   2 +
 .../marketing_platform/hooks/test_search_ads.py    |   2 +
 .../marketing_platform/operators/test_analytics.py |   1 +
 .../operators/test_campaign_manager.py             |   2 +
 .../operators/test_display_video.py                |   5 +-
 .../operators/test_display_video_system.py         |   2 +
 .../operators/test_search_ads.py                   |   2 +
 .../sensors/test_campaign_manager.py               |   2 +
 .../sensors/test_display_video.py                  |   1 +
 .../marketing_platform/sensors/test_search_ads.py  |   2 +
 .../providers/google/suite/hooks/test_calendar.py  |   3 +-
 tests/providers/google/suite/hooks/test_drive.py   |   3 +-
 tests/providers/google/suite/hooks/test_sheets.py  |   2 +-
 .../google/suite/operators/test_sheets.py          |   1 +
 tests/providers/google/suite/sensors/test_drive.py |   2 +-
 .../google/suite/transfers/test_gcs_to_gdrive.py   |   2 +
 .../google/suite/transfers/test_gcs_to_sheets.py   |   1 +
 .../google/suite/transfers/test_local_to_drive.py  |   1 +
 .../google/suite/transfers/test_sql_to_sheets.py   |   1 +
 tests/providers/google/test_go_module.py           |   3 +-
 tests/providers/grpc/hooks/test_grpc.py            |   1 +
 tests/providers/grpc/operators/test_grpc.py        |   1 +
 .../_internal_client/test_vault_client.py          |   1 +
 tests/providers/hashicorp/hooks/test_vault.py      |   1 +
 tests/providers/hashicorp/secrets/test_vault.py    |   1 +
 tests/providers/http/hooks/test_http.py            |   2 +
 tests/providers/http/operators/test_http.py        |   1 +
 tests/providers/http/sensors/test_http.py          |   2 +
 tests/providers/imap/hooks/test_imap.py            |   2 +
 .../providers/imap/sensors/test_imap_attachment.py |   1 +
 tests/providers/influxdb/hooks/test_influxdb.py    |   1 +
 .../providers/influxdb/operators/test_influxdb.py  |   1 +
 tests/providers/jdbc/hooks/test_jdbc.py            |   2 +-
 tests/providers/jdbc/operators/test_jdbc.py        |   1 +
 tests/providers/jenkins/hooks/test_jenkins.py      |   1 +
 .../jenkins/operators/test_jenkins_job_trigger.py  |   1 +
 tests/providers/jenkins/sensors/test_jenkins.py    |   1 +
 tests/providers/microsoft/azure/hooks/test_adx.py  |   2 +-
 tests/providers/microsoft/azure/hooks/test_asb.py  |   1 +
 .../microsoft/azure/hooks/test_azure_batch.py      |   3 +-
 .../azure/hooks/test_azure_container_instance.py   |   1 +
 .../azure/hooks/test_azure_container_registry.py   |   1 +
 .../azure/hooks/test_azure_container_volume.py     |   2 +
 .../microsoft/azure/hooks/test_azure_cosmos.py     |   3 +-
 .../azure/hooks/test_azure_data_factory.py         |   5 +-
 .../microsoft/azure/hooks/test_azure_data_lake.py  |   3 +-
 .../microsoft/azure/hooks/test_azure_fileshare.py  |   2 +-
 .../microsoft/azure/hooks/test_azure_synapse.py    |   5 +-
 .../microsoft/azure/hooks/test_base_azure.py       |   2 +
 tests/providers/microsoft/azure/hooks/test_wasb.py |   3 +-
 .../microsoft/azure/log/test_wasb_task_handler.py  |   1 +
 .../microsoft/azure/operators/test_adls_delete.py  |   1 +
 .../microsoft/azure/operators/test_adls_list.py    |   1 +
 .../microsoft/azure/operators/test_adx.py          |   2 +-
 .../microsoft/azure/operators/test_asb.py          |   1 +
 .../microsoft/azure/operators/test_azure_batch.py  |   3 +-
 .../operators/test_azure_container_instances.py    |   2 +-
 .../microsoft/azure/operators/test_azure_cosmos.py |   3 +-
 .../azure/operators/test_azure_data_factory.py     |   1 +
 .../azure/operators/test_azure_synapse.py          |   1 +
 .../azure/operators/test_wasb_delete_blob.py       |   2 +-
 .../azure/secrets/test_azure_key_vault.py          |   1 +
 .../microsoft/azure/sensors/test_azure_cosmos.py   |   1 +
 .../azure/sensors/test_azure_data_factory.py       |   1 +
 .../providers/microsoft/azure/sensors/test_wasb.py |   2 +-
 .../azure/transfers/test_azure_blob_to_gcs.py      |   2 +
 .../azure/transfers/test_local_to_adls.py          |   1 +
 .../azure/transfers/test_local_to_wasb.py          |   2 +-
 .../transfers/test_oracle_to_azure_data_lake.py    |   1 +
 .../microsoft/azure/transfers/test_sftp_to_wasb.py |   2 +-
 .../providers/microsoft/mssql/hooks/test_mssql.py  |   1 +
 .../microsoft/mssql/operators/test_mssql.py        |   1 +
 tests/providers/microsoft/psrp/hooks/test_psrp.py  |   2 +-
 .../microsoft/psrp/operators/test_psrp.py          |   5 +-
 .../providers/microsoft/winrm/hooks/test_winrm.py  |   2 +-
 .../microsoft/winrm/operators/test_winrm.py        |   1 +
 tests/providers/mongo/hooks/test_mongo.py          |   5 +-
 tests/providers/mongo/sensors/test_mongo.py        |   2 +-
 tests/providers/mysql/hooks/test_mysql.py          |   2 +-
 tests/providers/mysql/operators/test_mysql.py      |   2 +
 .../mysql/transfers/test_presto_to_mysql.py        |   2 +
 .../providers/mysql/transfers/test_s3_to_mysql.py  |   1 +
 .../mysql/transfers/test_trino_to_mysql.py         |   2 +
 .../mysql/transfers/test_vertica_to_mysql.py       |   1 +
 tests/providers/neo4j/hooks/test_neo4j.py          |   3 +-
 tests/providers/neo4j/operators/test_neo4j.py      |   1 +
 tests/providers/odbc/hooks/test_odbc.py            |   3 +-
 tests/providers/openfaas/hooks/test_openfaas.py    |   2 +-
 tests/providers/opsgenie/hooks/test_opsgenie.py    |   3 +-
 .../providers/opsgenie/operators/test_opsgenie.py  |   2 +-
 tests/providers/oracle/hooks/test_oracle.py        |   1 +
 tests/providers/oracle/operators/test_oracle.py    |   1 +
 .../oracle/transfers/test_oracle_to_oracle.py      |   1 +
 tests/providers/pagerduty/hooks/test_pagerduty.py  |   2 +
 .../pagerduty/hooks/test_pagerduty_events.py       |   2 +
 .../papermill/operators/test_papermill.py          |   2 +
 tests/providers/plexus/hooks/test_plexus.py        |   1 +
 tests/providers/plexus/operators/test_job.py       |   1 +
 tests/providers/postgres/hooks/test_postgres.py    |   3 +-
 .../providers/postgres/operators/test_postgres.py  |   1 +
 tests/providers/presto/hooks/test_presto.py        |   3 +-
 .../providers/presto/transfers/test_gcs_presto.py  |   2 +
 tests/providers/qubole/hooks/test_qubole.py        |   3 +-
 tests/providers/qubole/hooks/test_qubole_check.py  |   3 +-
 tests/providers/qubole/operators/test_qubole.py    |   2 +-
 .../qubole/operators/test_qubole_check.py          |   3 +-
 tests/providers/qubole/sensors/test_qubole.py      |   2 +-
 tests/providers/redis/hooks/test_redis.py          |   2 +-
 .../redis/operators/test_redis_publish.py          |   2 +-
 tests/providers/redis/sensors/test_redis_key.py    |   2 +-
 .../providers/redis/sensors/test_redis_pub_sub.py  |   2 +-
 .../providers/salesforce/hooks/test_salesforce.py  |   2 +-
 tests/providers/salesforce/operators/test_bulk.py  |   1 +
 .../operators/test_salesforce_apex_rest.py         |   1 +
 tests/providers/samba/hooks/test_samba.py          |   1 +
 tests/providers/segment/hooks/test_segment.py      |   3 +-
 .../segment/operators/test_segment_track_event.py  |   3 +-
 tests/providers/sendgrid/utils/test_emailer.py     |   2 +-
 tests/providers/sftp/hooks/test_sftp.py            |   2 +
 tests/providers/sftp/operators/test_sftp.py        |   2 +
 tests/providers/sftp/sensors/test_sftp.py          |   1 +
 .../singularity/operators/test_singularity.py      |   1 +
 tests/providers/slack/hooks/test_slack.py          |   5 +-
 tests/providers/slack/hooks/test_slack_webhook.py  |   3 +-
 tests/providers/slack/operators/test_slack.py      |   2 +
 .../slack/operators/test_slack_webhook.py          |   2 +-
 .../providers/slack/transfers/test_sql_to_slack.py |   1 +
 tests/providers/slack/utils/test_utils.py          |   1 +
 tests/providers/snowflake/hooks/test_snowflake.py  |   7 +-
 .../snowflake/operators/test_snowflake.py          |   1 +
 .../transfers/test_copy_into_snowflake.py          |   1 +
 .../snowflake/transfers/test_s3_to_snowflake.py    |   1 +
 .../snowflake/transfers/test_snowflake_to_slack.py |   1 +
 tests/providers/snowflake/utils/test_common.py     |   1 +
 tests/providers/sqlite/hooks/test_sqlite.py        |   2 +-
 tests/providers/sqlite/operators/test_sqlite.py    |   1 +
 tests/providers/ssh/hooks/test_ssh.py              |   5 +-
 tests/providers/ssh/operators/test_ssh.py          |   1 +
 tests/providers/tableau/hooks/test_tableau.py      |   1 +
 tests/providers/tableau/operators/test_tableau.py  |   1 +
 .../operators/test_tableau_refresh_workbook.py     |   1 +
 tests/providers/tableau/sensors/test_tableau.py    |   1 +
 tests/providers/tabular/hooks/test_tabular.py      |   1 +
 tests/providers/telegram/hooks/test_telegram.py    |   1 +
 .../providers/telegram/operators/test_telegram.py  |   2 +
 tests/providers/trino/hooks/test_trino.py          |   3 +-
 tests/providers/trino/operators/test_trino.py      |   2 +
 tests/providers/trino/transfers/test_gcs_trino.py  |   2 +
 tests/providers/vertica/hooks/test_vertica.py      |   2 +-
 tests/providers/vertica/operators/test_vertica.py  |   1 +
 tests/providers/yandex/hooks/test_yandex.py        |   1 +
 .../yandex/hooks/test_yandexcloud_dataproc.py      |   2 +
 .../yandex/operators/test_yandexcloud_dataproc.py  |   2 +-
 tests/providers/zendesk/hooks/test_zendesk.py      |   3 +-
 .../airbyte/example_airbyte_trigger_job.py         |   2 +-
 .../system/providers/alibaba/example_oss_bucket.py |   1 +
 .../system/providers/alibaba/example_oss_object.py |   2 +
 .../system/providers/amazon/aws/example_athena.py  |   2 +
 tests/system/providers/amazon/aws/example_batch.py |   2 +
 .../providers/amazon/aws/example_cloudformation.py |   2 +
 .../providers/amazon/aws/example_datasync.py       |   1 +
 .../providers/amazon/aws/example_ecs_fargate.py    |   2 +
 .../providers/amazon/aws/example_emr_serverless.py |   2 +-
 tests/system/providers/amazon/aws/example_glue.py  |   5 +-
 .../system/providers/amazon/aws/example_lambda.py  |   5 +-
 .../providers/amazon/aws/example_local_to_s3.py    |   2 +
 .../providers/amazon/aws/example_quicksight.py     |   2 +
 .../providers/amazon/aws/example_rds_event.py      |   1 +
 .../providers/amazon/aws/example_rds_export.py     |   1 +
 .../providers/amazon/aws/example_rds_snapshot.py   |   1 +
 .../amazon/aws/example_redshift_cluster.py         |   1 +
 .../aws/example_redshift_data_execute_sql.py       |   1 +
 .../providers/amazon/aws/example_redshift_sql.py   |   1 +
 .../providers/amazon/aws/example_redshift_to_s3.py |   1 +
 .../providers/amazon/aws/example_sagemaker.py      |   2 +
 .../amazon/aws/example_sagemaker_endpoint.py       |   2 +
 tests/system/providers/amazon/aws/example_sns.py   |   2 +
 tests/system/providers/amazon/aws/example_sqs.py   |   2 +
 .../providers/amazon/aws/example_step_functions.py |   2 +
 .../amazon/aws/rds/example_rds_instance.py         |   1 +
 .../system/providers/amazon/aws/utils/__init__.py  |  16 +-
 tests/system/providers/apache/beam/example_beam.py |   2 +-
 .../apache/beam/example_beam_java_flink.py         |   2 +-
 .../apache/beam/example_beam_java_spark.py         |   2 +-
 tests/system/providers/apache/beam/example_go.py   |   2 +-
 .../providers/apache/beam/example_go_dataflow.py   |   2 +-
 .../providers/apache/beam/example_java_dataflow.py |   2 +-
 .../system/providers/apache/beam/example_python.py |   2 +-
 .../apache/beam/example_python_dataflow.py         |   2 +-
 tests/system/providers/apache/beam/utils.py        |   3 +-
 .../apache/cassandra/example_cassandra_dag.py      |   9 +-
 .../providers/apache/drill/example_drill_dag.py    |   2 +-
 .../providers/apache/druid/example_druid_dag.py    |   2 +-
 .../providers/apache/hive/example_twitter_dag.py   |  19 +-
 .../providers/apache/kylin/example_kylin_dag.py    |   2 +-
 tests/system/providers/apache/livy/example_livy.py |   2 +-
 tests/system/providers/apache/pig/example_pig.py   |   2 +-
 .../providers/apache/spark/example_spark_dag.py    |   2 +-
 tests/system/providers/asana/example_asana.py      |  10 +-
 .../cncf/kubernetes/example_kubernetes.py          |   1 +
 .../kubernetes/example_kubernetes_decorator.py     |   1 +
 .../cncf/kubernetes/example_spark_kubernetes.py    |   2 +
 .../common/sql/example_sql_column_table_check.py   |   2 +
 .../providers/databricks/example_databricks.py     |   1 +
 .../databricks/example_databricks_repos.py         |   1 +
 .../providers/databricks/example_databricks_sql.py |   1 +
 .../providers/dbt/cloud/example_dbt_cloud.py       |   1 +
 .../system/providers/dingding/example_dingding.py  |   2 +
 tests/system/providers/docker/example_docker.py    |   2 +
 .../providers/docker/example_docker_copy_data.py   |   3 +-
 .../providers/docker/example_docker_swarm.py       |   2 +
 .../example_taskflow_api_docker_virtualenv.py      |   2 +-
 .../elasticsearch/example_elasticsearch_query.py   |   3 +-
 tests/system/providers/github/example_github.py    |   5 +-
 tests/system/providers/google/ads/example_ads.py   |   2 +
 .../automl/example_automl_nl_text_extraction.py    |   3 +-
 .../automl/example_automl_vision_classification.py |   3 +-
 .../cloud/azure/example_azure_fileshare_to_gcs.py  |   2 +
 .../cloud/bigquery/example_bigquery_dataset.py     |   3 +-
 .../cloud/bigquery/example_bigquery_operations.py  |   3 +-
 .../example_bigquery_operations_location.py        |   3 +-
 .../cloud/bigquery/example_bigquery_queries.py     |   3 +-
 .../bigquery/example_bigquery_queries_async.py     |   4 +-
 .../cloud/bigquery/example_bigquery_sensors.py     |   3 +-
 .../cloud/bigquery/example_bigquery_tables.py      |   3 +-
 .../cloud/bigquery/example_bigquery_to_bigquery.py |   3 +-
 .../cloud/bigquery/example_bigquery_to_gcs.py      |   3 +-
 .../cloud/bigquery/example_bigquery_to_mssql.py    |   3 +-
 .../cloud/bigquery/example_bigquery_transfer.py    |   3 +-
 .../cloud/cloud_build/example_cloud_build.py       |   7 +-
 .../cloud_build/example_cloud_build_trigger.py     |   6 +-
 .../example_cloud_memorystore_memcached.py         |   2 +
 .../example_cloud_memorystore_redis.py             |   2 +
 .../google/cloud/cloud_sql/example_cloud_sql.py    |   2 +-
 .../cloud/compute_igm/example_compute_igm.py       |   2 +-
 .../google/cloud/dataform/example_dataform.py      |   3 +-
 .../cloud/dataproc/example_dataproc_batch.py       |   2 +-
 .../dataproc/example_dataproc_batch_persistent.py  |   2 +-
 .../dataproc/example_dataproc_cluster_generator.py |   1 +
 .../google/cloud/dataproc/example_dataproc_gke.py  |   1 +
 .../cloud/dataproc/example_dataproc_hadoop.py      |   1 +
 .../google/cloud/dataproc/example_dataproc_hive.py |   1 +
 .../google/cloud/dataproc/example_dataproc_pig.py  |   1 +
 .../cloud/dataproc/example_dataproc_pyspark.py     |   1 +
 .../cloud/dataproc/example_dataproc_spark.py       |   1 +
 .../cloud/dataproc/example_dataproc_spark_async.py |   1 +
 .../dataproc/example_dataproc_spark_deferrable.py  |   1 +
 .../cloud/dataproc/example_dataproc_spark_sql.py   |   1 +
 .../cloud/dataproc/example_dataproc_sparkr.py      |   1 +
 .../cloud/dataproc/example_dataproc_update.py      |   1 +
 .../cloud/dataproc/example_dataproc_workflow.py    |   2 +-
 .../google/cloud/dataproc/resources/hello_world.py |   1 +
 .../cloud/datastore/example_datastore_commit.py    |   6 +-
 .../datastore/example_datastore_export_import.py   |   2 +-
 .../cloud/datastore/example_datastore_query.py     |   6 +-
 .../cloud/datastore/example_datastore_rollback.py  |   6 +-
 .../google/cloud/gcs/example_calendar_to_gcs.py    |   1 +
 .../google/cloud/gcs/example_firestore.py          |   2 +-
 .../providers/google/cloud/gcs/example_gcs_acl.py  |   1 +
 .../google/cloud/gcs/example_gcs_copy_delete.py    |   1 +
 .../google/cloud/gcs/example_gcs_sensor.py         |   1 +
 .../google/cloud/gcs/example_gcs_to_bigquery.py    |   2 +-
 .../google/cloud/gcs/example_gcs_to_gcs.py         |   1 +
 .../google/cloud/gcs/example_gcs_to_gdrive.py      |   2 +
 .../google/cloud/gcs/example_gcs_to_sheets.py      |   1 +
 .../google/cloud/gcs/example_gcs_transform.py      |   1 +
 .../cloud/gcs/example_gcs_transform_timespan.py    |   1 +
 .../cloud/gcs/example_gcs_upload_download.py       |   1 +
 .../google/cloud/gcs/example_mssql_to_gcs.py       |   2 +
 .../google/cloud/gcs/example_mysql_to_gcs.py       |   1 +
 .../google/cloud/gcs/example_oracle_to_gcs.py      |   1 +
 .../google/cloud/gcs/example_s3_to_gcs.py          |   1 +
 .../providers/google/cloud/gcs/example_sheets.py   |   1 +
 .../google/cloud/gcs/example_sheets_to_gcs.py      |   1 +
 .../google/cloud/gcs/example_trino_to_gcs.py       |   2 +
 .../google/cloud/gcs/resources/transform_script.py |   1 +
 .../cloud/gcs/resources/transform_timespan.py      |   1 +
 .../kubernetes_engine/example_kubernetes_engine.py |   1 +
 .../cloud/life_sciences/example_life_sciences.py   |   1 +
 .../natural_language/example_natural_language.py   |   3 +-
 .../google/cloud/pubsub/example_pubsub.py          |   3 +-
 .../google/cloud/spanner/example_spanner.py        |   2 +-
 .../cloud/speech_to_text/example_speech_to_text.py |   1 +
 .../cloud/sql_to_sheets/example_sql_to_sheets.py   |   2 +
 .../cloud/stackdriver/example_stackdriver.py       |   2 +-
 .../providers/google/cloud/tasks/example_queue.py  |   3 +-
 .../providers/google/cloud/tasks/example_tasks.py  |   3 +-
 .../cloud/text_to_speech/example_text_to_speech.py |   1 +
 .../google/cloud/transfers/example_gcs_to_sftp.py  |   1 +
 .../google/cloud/translate/example_translate.py    |   4 +-
 .../translate_speech/example_translate_speech.py   |   1 +
 .../example_video_intelligence.py                  |   3 +-
 .../google/cloud/workflows/example_workflows.py    |   1 +
 tests/system/providers/google/conftest.py          |   1 +
 .../datacatalog/example_datacatalog_entries.py     |   1 +
 .../example_datacatalog_search_catalog.py          |   1 +
 .../example_datacatalog_tag_templates.py           |   1 +
 .../google/datacatalog/example_datacatalog_tags.py |   1 +
 .../providers/google/leveldb/example_leveldb.py    |   1 +
 .../google/marketing_platform/example_analytics.py |   2 +
 .../marketing_platform/example_campaign_manager.py |   2 +
 .../marketing_platform/example_search_ads.py       |   2 +
 tests/system/providers/http/example_http.py        |   2 +-
 .../system/providers/influxdb/example_influxdb.py  |   1 +
 .../providers/influxdb/example_influxdb_query.py   |   1 +
 .../system/providers/jdbc/example_jdbc_queries.py  |   3 +-
 .../jenkins/example_jenkins_job_trigger.py         |   2 +
 .../microsoft/azure/example_adf_run_pipeline.py    |   2 +
 .../microsoft/azure/example_adls_delete.py         |   1 +
 .../microsoft/azure/example_azure_blob_to_gcs.py   |   8 +-
 .../azure/example_azure_container_instances.py     |   2 +
 .../microsoft/azure/example_azure_cosmosdb.py      |  10 +-
 .../microsoft/azure/example_azure_service_bus.py   |   1 +
 .../microsoft/azure/example_azure_synapse.py       |   1 +
 .../providers/microsoft/azure/example_fileshare.py |   2 +
 .../microsoft/azure/example_local_to_adls.py       |   1 +
 .../microsoft/azure/example_local_to_wasb.py       |   8 +-
 .../microsoft/azure/example_sftp_to_wasb.py        |   2 +
 .../providers/microsoft/mssql/example_mssql.py     |   2 +
 .../providers/microsoft/winrm/example_winrm.py     |  17 +-
 tests/system/providers/mysql/example_mysql.py      |   2 +
 tests/system/providers/neo4j/example_neo4j.py      |   2 +
 .../providers/opsgenie/example_opsgenie_alert.py   |   2 +
 .../providers/papermill/example_papermill.py       |   2 +
 .../papermill/example_papermill_verify.py          |   2 +
 tests/system/providers/plexus/example_plexus.py    |   1 +
 .../system/providers/postgres/example_postgres.py  |   2 +
 .../providers/presto/example_gcs_to_presto.py      |   1 +
 tests/system/providers/qubole/example_qubole.py    |   1 +
 .../providers/qubole/example_qubole_sensors.py     |   1 +
 tests/system/providers/salesforce/example_bulk.py  |   2 +
 .../salesforce/example_salesforce_apex_rest.py     |   2 +
 .../providers/singularity/example_singularity.py   |   1 +
 tests/system/providers/slack/example_slack.py      |   1 +
 .../system/providers/slack/example_sql_to_slack.py |   1 +
 .../providers/snowflake/example_snowflake.py       |   2 +
 tests/system/providers/sqlite/example_sqlite.py    |   1 +
 tests/system/providers/tableau/example_tableau.py  |   2 +
 .../tableau/example_tableau_refresh_workbook.py    |   2 +
 tests/system/providers/tabular/example_tabular.py  |   1 +
 .../system/providers/telegram/example_telegram.py  |   1 +
 .../system/providers/trino/example_gcs_to_trino.py |   1 +
 tests/system/providers/trino/example_trino.py      |   1 +
 .../system/providers/yandex/example_yandexcloud.py |  23 +-
 .../yandex/example_yandexcloud_dataproc.py         |   2 +
 .../example_yandexcloud_dataproc_lightweight.py    |   2 +
 .../zendesk/example_zendesk_custom_get.py          |   4 +-
 1476 files changed, 10993 insertions(+), 9529 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8b6cd16b8e..eb4a592e5c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -213,7 +213,9 @@ repos:
       - id: pyupgrade
         name: Upgrade Python code automatically
         args: ["--py37-plus"]
-        exclude: ^airflow/_vendor/
+        # We need to exclude gcs hook from pyupgrade because it has public "list" command which clashes
+        # with `list` that is used as type
+        exclude: ^airflow/_vendor/|^airflow/providers/google/cloud/hooks/gcs.py$
   - repo: https://github.com/pre-commit/pygrep-hooks
     rev: v1.9.0
     hooks:
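
For readers wondering what the `list` clash mentioned in the new comment looks like in practice, here is a minimal, self-contained sketch (the class and attribute names are invented, and the exact construct that trips pyupgrade inside gcs.py may differ): once a class body defines a public method named `list`, any code in that same class body that actually evaluates `list[...]` resolves the name to the method rather than to the builtin generic pyupgrade would rewrite to.

    from __future__ import annotations


    class Demo:
        # A public method named "list", like the GCS hook has.
        def list(self, prefix: str) -> list[str]:  # fine: this annotation is never evaluated
            return [prefix]

        # Anything in the class body that *evaluates* list[...] now picks up the
        # method above instead of the builtin -- the kind of rewrite pyupgrade
        # would introduce:
        try:
            PathList = list[str]
        except TypeError as exc:
            clash = str(exc)  # "'function' object is not subscriptable"


    print(Demo.clash)
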
diff --git a/airflow/providers/airbyte/hooks/airbyte.py b/airflow/providers/airbyte/hooks/airbyte.py
index ab0d7e4baf..8d6da4d2ba 100644
--- a/airflow/providers/airbyte/hooks/airbyte.py
+++ b/airflow/providers/airbyte/hooks/airbyte.py
@@ -15,8 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import time
-from typing import Any, Optional, Union
+from typing import Any
 
 from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
@@ -48,9 +50,7 @@ class AirbyteHook(HttpHook):
         super().__init__(http_conn_id=airbyte_conn_id)
         self.api_version: str = api_version
 
-    def wait_for_job(
-        self, job_id: Union[str, int], wait_seconds: float = 3, timeout: Optional[float] = 3600
-    ) -> None:
+    def wait_for_job(self, job_id: str | int, wait_seconds: float = 3, timeout: float | None = 3600) -> None:
         """
         Helper method which polls a job to check if it finishes.
 
diff --git a/airflow/providers/airbyte/operators/airbyte.py b/airflow/providers/airbyte/operators/airbyte.py
index 7677795a6a..8f38cf281d 100644
--- a/airflow/providers/airbyte/operators/airbyte.py
+++ b/airflow/providers/airbyte/operators/airbyte.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
@@ -51,10 +53,10 @@ class AirbyteTriggerSyncOperator(BaseOperator):
         self,
         connection_id: str,
         airbyte_conn_id: str = "airbyte_default",
-        asynchronous: Optional[bool] = False,
+        asynchronous: bool | None = False,
         api_version: str = "v1",
         wait_seconds: float = 3,
-        timeout: Optional[float] = 3600,
+        timeout: float | None = 3600,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -65,7 +67,7 @@ class AirbyteTriggerSyncOperator(BaseOperator):
         self.wait_seconds = wait_seconds
         self.asynchronous = asynchronous
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         """Create Airbyte Job and wait to finish"""
         self.hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
         job_object = self.hook.submit_sync_connection(connection_id=self.connection_id)
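
The hunks above show the whole pattern this commit applies: with `from __future__ import annotations` at the top of the module, PEP 604 unions (`str | int`, `float | None`) and unquoted forward references such as `Context` are accepted even on Python 3.7, because annotations are stored as strings rather than evaluated at import time. A minimal sketch of that pattern (the function name and body are illustrative, not the Airbyte operator itself):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Needed only by type checkers; never imported at runtime.
        from airflow.utils.context import Context


    def poke(context: Context, job_id: str | int = 0, timeout: float | None = 3600) -> bool:
        # With postponed evaluation these annotations are kept as plain strings,
        # so "str | int" and the unquoted Context work on Python 3.7 as well.
        return timeout is None or timeout > 0


    print(poke.__annotations__["timeout"])  # prints the string 'float | None'
    print(poke({}))                         # a plain dict stands in for Context at runtime
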
diff --git a/airflow/providers/airbyte/sensors/airbyte.py b/airflow/providers/airbyte/sensors/airbyte.py
index 10c5954ee3..aec4ab4773 100644
--- a/airflow/providers/airbyte/sensors/airbyte.py
+++ b/airflow/providers/airbyte/sensors/airbyte.py
@@ -16,6 +16,8 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains a Airbyte Job sensor."""
+from __future__ import annotations
+
 from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
@@ -52,7 +54,7 @@ class AirbyteJobSensor(BaseSensorOperator):
         self.airbyte_job_id = airbyte_job_id
         self.api_version = api_version
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
         job = hook.get_job(job_id=self.airbyte_job_id)
         status = job.json()['job']['status']
diff --git a/airflow/providers/alibaba/cloud/hooks/oss.py b/airflow/providers/alibaba/cloud/hooks/oss.py
index 08272adb25..63f2a9b620 100644
--- a/airflow/providers/alibaba/cloud/hooks/oss.py
+++ b/airflow/providers/alibaba/cloud/hooks/oss.py
@@ -15,9 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from functools import wraps
 from inspect import signature
-from typing import TYPE_CHECKING, Callable, Optional, TypeVar, cast
+from typing import TYPE_CHECKING, Callable, TypeVar, cast
 from urllib.parse import urlparse
 
 import oss2
@@ -88,13 +90,13 @@ class OSSHook(BaseHook):
     conn_type = 'oss'
     hook_name = 'OSS'
 
-    def __init__(self, region: Optional[str] = None, oss_conn_id='oss_default', *args, **kwargs) -> None:
+    def __init__(self, region: str | None = None, oss_conn_id='oss_default', *args, **kwargs) -> None:
         self.oss_conn_id = oss_conn_id
         self.oss_conn = self.get_connection(oss_conn_id)
         self.region = self.get_default_region() if region is None else region
         super().__init__(*args, **kwargs)
 
-    def get_conn(self) -> "Connection":
+    def get_conn(self) -> Connection:
         """Returns connection for the hook."""
         return self.oss_conn
 
@@ -118,7 +120,7 @@ class OSSHook(BaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def object_exists(self, key: str, bucket_name: Optional[str] = None) -> bool:
+    def object_exists(self, key: str, bucket_name: str | None = None) -> bool:
         """
         Check if object exists.
 
@@ -134,7 +136,7 @@ class OSSHook(BaseHook):
             return False
 
     @provide_bucket_name
-    def get_bucket(self, bucket_name: Optional[str] = None) -> oss2.api.Bucket:
+    def get_bucket(self, bucket_name: str | None = None) -> oss2.api.Bucket:
         """
         Returns a oss2.Bucket object
 
@@ -148,7 +150,7 @@ class OSSHook(BaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def load_string(self, key: str, content: str, bucket_name: Optional[str] = None) -> None:
+    def load_string(self, key: str, content: str, bucket_name: str | None = None) -> None:
         """
         Loads a string to OSS
 
@@ -167,7 +169,7 @@ class OSSHook(BaseHook):
         self,
         key: str,
         file: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
     ) -> None:
         """
         Upload a local file to OSS
@@ -187,8 +189,8 @@ class OSSHook(BaseHook):
         self,
         key: str,
         local_file: str,
-        bucket_name: Optional[str] = None,
-    ) -> Optional[str]:
+        bucket_name: str | None = None,
+    ) -> str | None:
         """
         Download file from OSS
 
@@ -210,7 +212,7 @@ class OSSHook(BaseHook):
     def delete_object(
         self,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
     ) -> None:
         """
         Delete object from OSS
@@ -229,7 +231,7 @@ class OSSHook(BaseHook):
     def delete_objects(
         self,
         key: list,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
     ) -> None:
         """
         Delete objects from OSS
@@ -246,7 +248,7 @@ class OSSHook(BaseHook):
     @provide_bucket_name
     def delete_bucket(
         self,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
     ) -> None:
         """
         Delete bucket from OSS
@@ -262,7 +264,7 @@ class OSSHook(BaseHook):
     @provide_bucket_name
     def create_bucket(
         self,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
     ) -> None:
         """
         Create bucket
@@ -277,7 +279,7 @@ class OSSHook(BaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def append_string(self, bucket_name: Optional[str], content: str, key: str, pos: int) -> None:
+    def append_string(self, bucket_name: str | None, content: str, key: str, pos: int) -> None:
         """
         Append string to a remote existing file
 
@@ -295,7 +297,7 @@ class OSSHook(BaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def read_key(self, bucket_name: Optional[str], key: str) -> str:
+    def read_key(self, bucket_name: str | None, key: str) -> str:
         """
         Read oss remote object content with the specified key
 
@@ -311,7 +313,7 @@ class OSSHook(BaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def head_key(self, bucket_name: Optional[str], key: str) -> oss2.models.HeadObjectResult:
+    def head_key(self, bucket_name: str | None, key: str) -> oss2.models.HeadObjectResult:
         """
         Get meta info of the specified remote object
 
@@ -327,7 +329,7 @@ class OSSHook(BaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def key_exist(self, bucket_name: Optional[str], key: str) -> bool:
+    def key_exist(self, bucket_name: str | None, key: str) -> bool:
         """
         Find out whether the specified key exists in the oss remote storage
 
@@ -360,7 +362,7 @@ class OSSHook(BaseHook):
 
         return oss2.Auth(oss_access_key_id, oss_access_key_secret)
 
-    def get_default_region(self) -> Optional[str]:
+    def get_default_region(self) -> str | None:
         extra_config = self.oss_conn.extra_dejson
         auth_type = extra_config.get('auth_type', None)
         if not auth_type:
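
One caveat worth keeping in mind about the `str | None` style used throughout this file: the postponed annotations stay plain strings at runtime, and anything that wants the real type objects back (for example `typing.get_type_hints`) can only evaluate the `|` syntax on interpreters where it is valid, i.e. Python 3.10+. A small sketch, with a stand-in function rather than the hook method itself:

    from __future__ import annotations

    import sys
    import typing


    def get_default_region() -> str | None:  # stand-in for the hook method above
        return None


    # Regardless of interpreter version the annotation is kept as a plain string:
    print(get_default_region.__annotations__)  # {'return': 'str | None'}

    # Turning it back into real type objects requires an interpreter where
    # "str | None" is itself valid at runtime, i.e. Python 3.10+:
    if sys.version_info >= (3, 10):
        print(typing.get_type_hints(get_default_region))  # a real union object, not a string
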
diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/airflow/providers/alibaba/cloud/log/oss_task_handler.py
index d404a47e4a..924046145b 100644
--- a/airflow/providers/alibaba/cloud/log/oss_task_handler.py
+++ b/airflow/providers/alibaba/cloud/log/oss_task_handler.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import contextlib
 import os
 import pathlib
diff --git a/airflow/providers/alibaba/cloud/operators/oss.py b/airflow/providers/alibaba/cloud/operators/oss.py
index 8ec9b4b139..c6614c7c6b 100644
--- a/airflow/providers/alibaba/cloud/operators/oss.py
+++ b/airflow/providers/alibaba/cloud/operators/oss.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains Alibaba Cloud OSS operators."""
-from typing import TYPE_CHECKING, Optional
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.alibaba.cloud.hooks.oss import OSSHook
@@ -38,7 +39,7 @@ class OSSCreateBucketOperator(BaseOperator):
     def __init__(
         self,
         region: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         oss_conn_id: str = 'oss_default',
         **kwargs,
     ) -> None:
@@ -47,7 +48,7 @@ class OSSCreateBucketOperator(BaseOperator):
         self.region = region
         self.bucket_name = bucket_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         oss_hook = OSSHook(oss_conn_id=self.oss_conn_id, region=self.region)
         oss_hook.create_bucket(bucket_name=self.bucket_name)
 
@@ -64,7 +65,7 @@ class OSSDeleteBucketOperator(BaseOperator):
     def __init__(
         self,
         region: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         oss_conn_id: str = 'oss_default',
         **kwargs,
     ) -> None:
@@ -73,7 +74,7 @@ class OSSDeleteBucketOperator(BaseOperator):
         self.region = region
         self.bucket_name = bucket_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         oss_hook = OSSHook(oss_conn_id=self.oss_conn_id, region=self.region)
         oss_hook.delete_bucket(bucket_name=self.bucket_name)
 
@@ -94,7 +95,7 @@ class OSSUploadObjectOperator(BaseOperator):
         key: str,
         file: str,
         region: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         oss_conn_id: str = 'oss_default',
         **kwargs,
     ) -> None:
@@ -105,7 +106,7 @@ class OSSUploadObjectOperator(BaseOperator):
         self.region = region
         self.bucket_name = bucket_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         oss_hook = OSSHook(oss_conn_id=self.oss_conn_id, region=self.region)
         oss_hook.upload_local_file(bucket_name=self.bucket_name, key=self.key, file=self.file)
 
@@ -126,7 +127,7 @@ class OSSDownloadObjectOperator(BaseOperator):
         key: str,
         file: str,
         region: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         oss_conn_id: str = 'oss_default',
         **kwargs,
     ) -> None:
@@ -137,7 +138,7 @@ class OSSDownloadObjectOperator(BaseOperator):
         self.region = region
         self.bucket_name = bucket_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         oss_hook = OSSHook(oss_conn_id=self.oss_conn_id, region=self.region)
         oss_hook.download_file(bucket_name=self.bucket_name, key=self.key, local_file=self.file)
 
@@ -156,7 +157,7 @@ class OSSDeleteBatchObjectOperator(BaseOperator):
         self,
         keys: list,
         region: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         oss_conn_id: str = 'oss_default',
         **kwargs,
     ) -> None:
@@ -166,7 +167,7 @@ class OSSDeleteBatchObjectOperator(BaseOperator):
         self.region = region
         self.bucket_name = bucket_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         oss_hook = OSSHook(oss_conn_id=self.oss_conn_id, region=self.region)
         oss_hook.delete_objects(bucket_name=self.bucket_name, key=self.keys)
 
@@ -185,7 +186,7 @@ class OSSDeleteObjectOperator(BaseOperator):
         self,
         key: str,
         region: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         oss_conn_id: str = 'oss_default',
         **kwargs,
     ) -> None:
@@ -195,6 +196,6 @@ class OSSDeleteObjectOperator(BaseOperator):
         self.region = region
         self.bucket_name = bucket_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         oss_hook = OSSHook(oss_conn_id=self.oss_conn_id, region=self.region)
         oss_hook.delete_object(bucket_name=self.bucket_name, key=self.key)
diff --git a/airflow/providers/alibaba/cloud/sensors/oss_key.py b/airflow/providers/alibaba/cloud/sensors/oss_key.py
index 00e897f5d6..cbc40086ab 100644
--- a/airflow/providers/alibaba/cloud/sensors/oss_key.py
+++ b/airflow/providers/alibaba/cloud/sensors/oss_key.py
@@ -15,8 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 from urllib.parse import urlparse
 
 from airflow.compat.functools import cached_property
@@ -48,8 +49,8 @@ class OSSKeySensor(BaseSensorOperator):
         self,
         bucket_key: str,
         region: str,
-        bucket_name: Optional[str] = None,
-        oss_conn_id: Optional[str] = 'oss_default',
+        bucket_name: str | None = None,
+        oss_conn_id: str | None = 'oss_default',
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -58,9 +59,9 @@ class OSSKeySensor(BaseSensorOperator):
         self.bucket_key = bucket_key
         self.region = region
         self.oss_conn_id = oss_conn_id
-        self.hook: Optional[OSSHook] = None
+        self.hook: OSSHook | None = None
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         """
         Check if the object exists in the bucket to pull key.
         @param self - the object itself
diff --git a/airflow/providers/amazon/aws/example_dags/example_appflow.py b/airflow/providers/amazon/aws/example_dags/example_appflow.py
index 155521912d..c986961499 100644
--- a/airflow/providers/amazon/aws/example_dags/example_appflow.py
+++ b/airflow/providers/amazon/aws/example_dags/example_appflow.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from datetime import datetime
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_dms.py b/airflow/providers/amazon/aws/example_dags/example_dms.py
index 0ce8a2e5e2..29bfba5a5b 100644
--- a/airflow/providers/amazon/aws/example_dags/example_dms.py
+++ b/airflow/providers/amazon/aws/example_dags/example_dms.py
@@ -19,6 +19,7 @@
 Note:  DMS requires you to configure specific IAM roles/permissions.  For more information, see
 https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.html#CHAP_Security.APIRole
 """
+from __future__ import annotations
 
 import json
 import os
diff --git a/airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py
index 5abf23d29c..844e0dec88 100644
--- a/airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_dynamodb_to_s3.py
@@ -14,6 +14,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from datetime import datetime
 from os import environ
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_ec2.py b/airflow/providers/amazon/aws/example_dags/example_ec2.py
index 5c6b04301d..7f523a1283 100644
--- a/airflow/providers/amazon/aws/example_dags/example_ec2.py
+++ b/airflow/providers/amazon/aws/example_dags/example_ec2.py
@@ -14,6 +14,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import os
 from datetime import datetime
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_ecs.py b/airflow/providers/amazon/aws/example_dags/example_ecs.py
index a953de0bc5..4439ef0ee3 100644
--- a/airflow/providers/amazon/aws/example_dags/example_ecs.py
+++ b/airflow/providers/amazon/aws/example_dags/example_ecs.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from datetime import datetime
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_eks_templated.py b/airflow/providers/amazon/aws/example_dags/example_eks_templated.py
index 0d9276e499..88c7d817cc 100644
--- a/airflow/providers/amazon/aws/example_dags/example_eks_templated.py
+++ b/airflow/providers/amazon/aws/example_dags/example_eks_templated.py
@@ -14,9 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
+from __future__ import annotations
 
 from datetime import datetime
 
@@ -31,6 +29,10 @@ from airflow.providers.amazon.aws.operators.eks import (
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 
+# mypy ignore arg types (for templated fields)
+# type: ignore[arg-type]
+
+
 # Example Jinja Template format, substitute your values:
 """
 {
@@ -65,7 +67,8 @@ with DAG(
         cluster_name=CLUSTER_NAME,
         compute=None,
         cluster_role_arn="{{ dag_run.conf['cluster_role_arn'] }}",
-        resources_vpc_config="{{ dag_run.conf['resources_vpc_config'] }}",
+        # This only works with render_template_as_native_obj flag (this dag has it set)
+        resources_vpc_config="{{ dag_run.conf['resources_vpc_config'] }}",  # type: ignore[arg-type]
     )
 
     await_create_cluster = EksClusterStateSensor(
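
As the new comment notes, passing `dag_run.conf['resources_vpc_config']` through a templated field only yields a real dict because the DAG sets `render_template_as_native_obj`. A hedged, stand-alone sketch of that behaviour (the DAG id, dates and operator below are made up for illustration, not taken from the example DAG):

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.python import PythonOperator


    def show(vpc_config):
        # With native rendering this receives the dict from dag_run.conf, not its str().
        print(type(vpc_config), vpc_config)


    with DAG(
        dag_id="native_rendering_sketch",
        start_date=datetime(2022, 1, 1),
        schedule_interval=None,
        render_template_as_native_obj=True,  # without this flag the field renders as a str
    ) as dag:
        PythonOperator(
            task_id="show_conf",
            python_callable=show,
            op_kwargs={"vpc_config": "{{ dag_run.conf['resources_vpc_config'] }}"},
        )
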
diff --git a/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_in_one_step.py b/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_in_one_step.py
index b86c3f7343..8142439082 100644
--- a/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_in_one_step.py
+++ b/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_in_one_step.py
@@ -14,9 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
+from __future__ import annotations
 
 from datetime import datetime
 from os import environ
@@ -30,6 +28,9 @@ from airflow.providers.amazon.aws.operators.eks import (
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
 
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
 CLUSTER_NAME = 'fargate-all-in-one'
 FARGATE_PROFILE_NAME = f'{CLUSTER_NAME}-profile'
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_profile.py b/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_profile.py
index 8cb247035a..59cba76239 100644
--- a/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_profile.py
+++ b/airflow/providers/amazon/aws/example_dags/example_eks_with_fargate_profile.py
@@ -14,9 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
+from __future__ import annotations
 
 from datetime import datetime
 from os import environ
@@ -32,6 +30,9 @@ from airflow.providers.amazon.aws.operators.eks import (
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksFargateProfileStateSensor
 
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
 CLUSTER_NAME = 'fargate-demo'
 FARGATE_PROFILE_NAME = f'{CLUSTER_NAME}-profile'
 SELECTORS = [{'namespace': 'default'}]
diff --git a/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroup_in_one_step.py b/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroup_in_one_step.py
index aaf84cf3ef..ce6032c37c 100644
--- a/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroup_in_one_step.py
+++ b/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroup_in_one_step.py
@@ -14,9 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
+from __future__ import annotations
 
 from datetime import datetime
 from os import environ
@@ -30,6 +28,9 @@ from airflow.providers.amazon.aws.operators.eks import (
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
 CLUSTER_NAME = environ.get('EKS_CLUSTER_NAME', 'eks-demo')
 NODEGROUP_NAME = f'{CLUSTER_NAME}-nodegroup'
 ROLE_ARN = environ.get('EKS_DEMO_ROLE_ARN', 'arn:aws:iam::123456789012:role/role_name')
diff --git a/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroups.py b/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroups.py
index eb408af043..f0d839cb39 100644
--- a/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroups.py
+++ b/airflow/providers/amazon/aws/example_dags/example_eks_with_nodegroups.py
@@ -14,9 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
+from __future__ import annotations
 
 from datetime import datetime
 from os import environ
@@ -32,6 +30,9 @@ from airflow.providers.amazon.aws.operators.eks import (
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, EksNodegroupStateSensor
 
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
 CLUSTER_NAME = 'eks-demo'
 NODEGROUP_SUFFIX = '-nodegroup'
 NODEGROUP_NAME = CLUSTER_NAME + NODEGROUP_SUFFIX
diff --git a/airflow/providers/amazon/aws/example_dags/example_emr.py b/airflow/providers/amazon/aws/example_dags/example_emr.py
index 399269e64a..b244d38d82 100644
--- a/airflow/providers/amazon/aws/example_dags/example_emr.py
+++ b/airflow/providers/amazon/aws/example_dags/example_emr.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import os
 from datetime import datetime
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_emr_eks.py b/airflow/providers/amazon/aws/example_dags/example_emr_eks.py
index 413ab9e15c..d827938985 100644
--- a/airflow/providers/amazon/aws/example_dags/example_emr_eks.py
+++ b/airflow/providers/amazon/aws/example_dags/example_emr_eks.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py
index 9e95400391..4c187dc1e0 100644
--- a/airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_ftp_to_s3.py
@@ -14,7 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py
index e45089cf57..ad5c8072df 100644
--- a/airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_gcs_to_s3.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py b/airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py
index 0f3aaf9e8e..593688dcbb 100644
--- a/airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py
+++ b/airflow/providers/amazon/aws/example_dags/example_glacier_to_gcs.py
@@ -14,6 +14,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import os
 from datetime import datetime
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py
index cc83a6d1e7..86c6f96426 100644
--- a/airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_google_api_sheets_to_s3.py
@@ -18,6 +18,7 @@
 This is a basic example dag for using `GoogleApiToS3Operator` to retrieve Google Sheets data
 You need to set all env variables to request the data.
 """
+from __future__ import annotations
 
 from datetime import datetime
 from os import getenv
diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py
index 869276c67d..5463571c9c 100644
--- a/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_google_api_youtube_to_s3.py
@@ -33,6 +33,7 @@ https://developers.google.com/youtube/v3/docs/videos/list#parameters for more in
 YOUTUBE_CONN_ID is optional for public videos. It does only need to authenticate when there are private videos
 on a YouTube channel you want to retrieve.
 """
+from __future__ import annotations
 
 from datetime import datetime
 from os import getenv
diff --git a/airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py b/airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py
index b033dbb80c..4b7d143722 100644
--- a/airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py
+++ b/airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py
@@ -14,12 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """
    This DAG will not work unless you create an Amazon EMR cluster running
    Apache Hive and copy data into it following steps 1-4 (inclusive) here:
    https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/EMRforDynamoDB.Tutorial.html
 """
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py
index a84f7ac1dd..b30af0ebb0 100644
--- a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py
@@ -14,11 +14,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """
 This is an example dag for using `ImapAttachmentToS3Operator` to transfer an email attachment via IMAP
 protocol from a mail server to S3 Bucket.
 """
+from __future__ import annotations
 
 from datetime import datetime
 from os import getenv
diff --git a/airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py
index 3b23c4651c..39e4d6ac89 100644
--- a/airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_mongo_to_s3.py
@@ -14,7 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from __future__ import annotations
 
 import os
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_s3.py b/airflow/providers/amazon/aws/example_dags/example_s3.py
index 33b90877e4..5f5348cff1 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3.py
@@ -14,10 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import os
 from datetime import datetime
-from typing import List
 
 from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
@@ -62,7 +62,7 @@ with DAG(
     tags=['example'],
 ) as dag:
     # [START howto_sensor_s3_key_function_definition]
-    def check_fn(files: List) -> bool:
+    def check_fn(files: list) -> bool:
         """
         Example of custom check: check if all files are bigger than ``1kB``
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py b/airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py
index 3f8c1b0ec3..47ca88a0af 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_ftp.py
@@ -14,7 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
index 5cd224573d..e1b25d177d 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from datetime import datetime
 from os import getenv
diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py b/airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py
index a544866e02..1c625b8494 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_sftp.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py
index 78864578ce..067b5fa86e 100644
--- a/airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_salesforce_to_s3.py
@@ -14,11 +14,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """
 This is a basic example DAG for using `SalesforceToS3Operator` to retrieve Salesforce account
 data and upload it to an Amazon S3 bucket.
 """
+from __future__ import annotations
 
 from datetime import datetime
 from os import getenv
diff --git a/airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py
index d67ef54b68..24f480fb2f 100644
--- a/airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_sftp_to_s3.py
@@ -14,7 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py
index 47abd74ca3..d467871b4b 100644
--- a/airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_sql_to_s3.py
@@ -14,7 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from __future__ import annotations
 
 import os
 from datetime import datetime
diff --git a/airflow/providers/amazon/aws/exceptions.py b/airflow/providers/amazon/aws/exceptions.py
index cb59d5b694..b606dc504f 100644
--- a/airflow/providers/amazon/aws/exceptions.py
+++ b/airflow/providers/amazon/aws/exceptions.py
@@ -15,7 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
+
 # Note: Any AirflowException raised is expected to cause the TaskInstance
 #       to be marked in an ERROR state
 
diff --git a/airflow/providers/amazon/aws/hooks/appflow.py b/airflow/providers/amazon/aws/hooks/appflow.py
index e2b628bf3d..3bf57e50e0 100644
--- a/airflow/providers/amazon/aws/hooks/appflow.py
+++ b/airflow/providers/amazon/aws/hooks/appflow.py
@@ -14,11 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import json
 from datetime import datetime, timezone
 from time import sleep
-from typing import TYPE_CHECKING, List
+from typing import TYPE_CHECKING
 
 from airflow.compat.functools import cached_property
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -53,7 +54,7 @@ class AppflowHook(AwsBaseHook):
         super().__init__(*args, **kwargs)
 
     @cached_property
-    def conn(self) -> 'AppflowClient':
+    def conn(self) -> AppflowClient:
         """Get the underlying boto3 Appflow client (cached)"""
         return super().conn
 
@@ -104,7 +105,7 @@ class AppflowHook(AwsBaseHook):
         return execution_id
 
     def update_flow_filter(
-        self, flow_name: str, filter_tasks: List["TaskTypeDef"], set_trigger_ondemand: bool = False
+        self, flow_name: str, filter_tasks: list[TaskTypeDef], set_trigger_ondemand: bool = False
     ) -> None:
         """
         Update the flow task filter.
@@ -117,7 +118,7 @@ class AppflowHook(AwsBaseHook):
         """
         response = self.conn.describe_flow(flowName=flow_name)
         connector_type = response["sourceFlowConfig"]["connectorType"]
-        tasks: List["TaskTypeDef"] = []
+        tasks: list[TaskTypeDef] = []
 
         # cleanup old filter tasks
         for task in response["tasks"]:
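
The unquoted AppflowClient and TaskTypeDef annotations above only work because "from __future__ import annotations" stores every annotation as a string, so TYPE_CHECKING-only imports never have to exist at runtime. A standalone sketch of the pattern (the stub module path follows the usual mypy-boto3-appflow layout, assumed here, and the function is invented):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only imported by type checkers; absent at runtime.
        from mypy_boto3_appflow.type_defs import TaskTypeDef


    def first_filter_task(tasks: list[TaskTypeDef]) -> TaskTypeDef | None:
        # The annotations are kept as strings, so neither the missing runtime
        # import nor the 3.9/3.10-only syntax is ever evaluated here.
        return tasks[0] if tasks else None
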
diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/airflow/providers/amazon/aws/hooks/athena.py
index f777a5b558..10fa50a3bd 100644
--- a/airflow/providers/amazon/aws/hooks/athena.py
+++ b/airflow/providers/amazon/aws/hooks/athena.py
@@ -15,7 +15,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """
 This module contains AWS Athena hook.
 
@@ -23,9 +22,11 @@ This module contains AWS Athena hook.
 
     PageIterator
 """
+from __future__ import annotations
+
 import warnings
 from time import sleep
-from typing import Any, Dict, Optional
+from typing import Any
 
 from botocore.paginate import PageIterator
 
@@ -67,9 +68,9 @@ class AthenaHook(AwsBaseHook):
     def run_query(
         self,
         query: str,
-        query_context: Dict[str, str],
-        result_configuration: Dict[str, Any],
-        client_request_token: Optional[str] = None,
+        query_context: dict[str, str],
+        result_configuration: dict[str, Any],
+        client_request_token: str | None = None,
         workgroup: str = 'primary',
     ) -> str:
         """
@@ -93,7 +94,7 @@ class AthenaHook(AwsBaseHook):
         response = self.get_conn().start_query_execution(**params)
         return response['QueryExecutionId']
 
-    def check_query_status(self, query_execution_id: str) -> Optional[str]:
+    def check_query_status(self, query_execution_id: str) -> str | None:
         """
         Fetch the status of submitted athena query. Returns None or one of valid query states.
 
@@ -111,7 +112,7 @@ class AthenaHook(AwsBaseHook):
             # The error is being absorbed to implement retries.
             return state
 
-    def get_state_change_reason(self, query_execution_id: str) -> Optional[str]:
+    def get_state_change_reason(self, query_execution_id: str) -> str | None:
         """
         Fetch the reason for a state change (e.g. error message). Returns None or reason string.
 
@@ -130,8 +131,8 @@ class AthenaHook(AwsBaseHook):
             return reason
 
     def get_query_results(
-        self, query_execution_id: str, next_token_id: Optional[str] = None, max_results: int = 1000
-    ) -> Optional[dict]:
+        self, query_execution_id: str, next_token_id: str | None = None, max_results: int = 1000
+    ) -> dict | None:
         """
         Fetch submitted athena query results. returns none if query is in intermediate state or
         failed/cancelled state else dict of query output
@@ -156,10 +157,10 @@ class AthenaHook(AwsBaseHook):
     def get_query_results_paginator(
         self,
         query_execution_id: str,
-        max_items: Optional[int] = None,
-        page_size: Optional[int] = None,
-        starting_token: Optional[str] = None,
-    ) -> Optional[PageIterator]:
+        max_items: int | None = None,
+        page_size: int | None = None,
+        starting_token: str | None = None,
+    ) -> PageIterator | None:
         """
         Fetch submitted athena query results. returns none if query is in intermediate state or
         failed/cancelled state else a paginator to iterate through pages of results. If you
@@ -192,9 +193,9 @@ class AthenaHook(AwsBaseHook):
     def poll_query_status(
         self,
         query_execution_id: str,
-        max_tries: Optional[int] = None,
-        max_polling_attempts: Optional[int] = None,
-    ) -> Optional[str]:
+        max_tries: int | None = None,
+        max_polling_attempts: int | None = None,
+    ) -> str | None:
         """
         Poll the status of submitted athena query until query state reaches final state.
         Returns one of the final states
@@ -264,7 +265,7 @@ class AthenaHook(AwsBaseHook):
 
         return output_location
 
-    def stop_query(self, query_execution_id: str) -> Dict:
+    def stop_query(self, query_execution_id: str) -> dict:
         """
         Cancel the submitted athena query
 
diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py
index 3a7c4f6d0a..f6ce992e41 100644
--- a/airflow/providers/amazon/aws/hooks/base_aws.py
+++ b/airflow/providers/amazon/aws/hooks/base_aws.py
@@ -15,7 +15,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """
 This module contains Base AWS Hook.
 
@@ -23,13 +22,14 @@ This module contains Base AWS Hook.
     For more information on how to use this hook, take a look at the guide:
     :ref:`howto/connection:AWSHook`
 """
+from __future__ import annotations
 
 import datetime
 import json
 import logging
 import warnings
 from functools import wraps
-from typing import Any, Callable, Dict, Generic, Optional, Tuple, Type, TypeVar, Union
+from typing import Any, Callable, Generic, TypeVar, Union
 
 import boto3
 import botocore
@@ -68,9 +68,9 @@ class BaseSessionFactory(LoggingMixin):
 
     def __init__(
         self,
-        conn: Optional[Union[Connection, AwsConnectionWrapper]],
-        region_name: Optional[str] = None,
-        config: Optional[Config] = None,
+        conn: Connection | AwsConnectionWrapper | None,
+        region_name: str | None = None,
+        config: Config | None = None,
     ) -> None:
         super().__init__()
         self._conn = conn
@@ -92,22 +92,22 @@ class BaseSessionFactory(LoggingMixin):
         return self._create_basic_session(session_kwargs=self.conn.session_kwargs)
 
     @property
-    def extra_config(self) -> Dict[str, Any]:
+    def extra_config(self) -> dict[str, Any]:
         """AWS Connection extra_config."""
         return self.conn.extra_config
 
     @property
-    def region_name(self) -> Optional[str]:
+    def region_name(self) -> str | None:
         """AWS Region Name read-only property."""
         return self.conn.region_name
 
     @property
-    def config(self) -> Optional[Config]:
+    def config(self) -> Config | None:
         """Configuration for botocore client read-only property."""
         return self.conn.botocore_config
 
     @property
-    def role_arn(self) -> Optional[str]:
+    def role_arn(self) -> str | None:
         """Assume Role ARN from AWS Connection"""
         return self.conn.role_arn
 
@@ -124,10 +124,10 @@ class BaseSessionFactory(LoggingMixin):
             return self.basic_session
         return self._create_session_with_assume_role(session_kwargs=self.conn.session_kwargs)
 
-    def _create_basic_session(self, session_kwargs: Dict[str, Any]) -> boto3.session.Session:
+    def _create_basic_session(self, session_kwargs: dict[str, Any]) -> boto3.session.Session:
         return boto3.session.Session(**session_kwargs)
 
-    def _create_session_with_assume_role(self, session_kwargs: Dict[str, Any]) -> boto3.session.Session:
+    def _create_session_with_assume_role(self, session_kwargs: dict[str, Any]) -> boto3.session.Session:
         if self.conn.assume_role_method == 'assume_role_with_web_identity':
             # Deferred credentials have no initial credentials
             credential_fetcher = self._get_web_identity_credential_fetcher()
@@ -151,7 +151,7 @@ class BaseSessionFactory(LoggingMixin):
 
         return boto3.session.Session(botocore_session=session, **session_kwargs)
 
-    def _refresh_credentials(self) -> Dict[str, Any]:
+    def _refresh_credentials(self) -> dict[str, Any]:
         self.log.debug('Refreshing credentials')
         assume_role_method = self.conn.assume_role_method
         if assume_role_method not in ('assume_role', 'assume_role_with_saml'):
@@ -179,7 +179,7 @@ class BaseSessionFactory(LoggingMixin):
         }
         return credentials
 
-    def _assume_role(self, sts_client: boto3.client) -> Dict:
+    def _assume_role(self, sts_client: boto3.client) -> dict:
         kw = {
             "RoleSessionName": self._strip_invalid_session_name_characters(f"Airflow_{self.conn.conn_id}"),
             **self.conn.assume_role_kwargs,
@@ -187,7 +187,7 @@ class BaseSessionFactory(LoggingMixin):
         }
         return sts_client.assume_role(**kw)
 
-    def _assume_role_with_saml(self, sts_client: boto3.client) -> Dict[str, Any]:
+    def _assume_role_with_saml(self, sts_client: boto3.client) -> dict[str, Any]:
         saml_config = self.extra_config['assume_role_with_saml']
         principal_arn = saml_config['principal_arn']
 
@@ -209,7 +209,7 @@ class BaseSessionFactory(LoggingMixin):
         )
 
     def _get_idp_response(
-        self, saml_config: Dict[str, Any], auth: requests.auth.AuthBase
+        self, saml_config: dict[str, Any], auth: requests.auth.AuthBase
     ) -> requests.models.Response:
         idp_url = saml_config["idp_url"]
         self.log.debug("idp_url= %s", idp_url)
@@ -237,7 +237,7 @@ class BaseSessionFactory(LoggingMixin):
 
         return idp_response
 
-    def _fetch_saml_assertion_using_http_spegno_auth(self, saml_config: Dict[str, Any]) -> str:
+    def _fetch_saml_assertion_using_http_spegno_auth(self, saml_config: dict[str, Any]) -> str:
         # requests_gssapi will need paramiko > 2.6 since you'll need
         # 'gssapi' not 'python-gssapi' from PyPi.
         # https://github.com/paramiko/paramiko/pull/1311
@@ -322,7 +322,7 @@ class BaseSessionFactory(LoggingMixin):
     def _strip_invalid_session_name_characters(self, role_session_name: str) -> str:
         return slugify(role_session_name, regex_pattern=r'[^\w+=,.@-]+')
 
-    def _get_region_name(self) -> Optional[str]:
+    def _get_region_name(self) -> str | None:
         warnings.warn(
             "`BaseSessionFactory._get_region_name` method deprecated and will be removed "
             "in a future releases. Please use `BaseSessionFactory.region_name` property instead.",
@@ -331,7 +331,7 @@ class BaseSessionFactory(LoggingMixin):
         )
         return self.region_name
 
-    def _read_role_arn_from_extra_config(self) -> Optional[str]:
+    def _read_role_arn_from_extra_config(self) -> str | None:
         warnings.warn(
             "`BaseSessionFactory._read_role_arn_from_extra_config` method deprecated and will be removed "
             "in a future releases. Please use `BaseSessionFactory.role_arn` property instead.",
@@ -340,7 +340,7 @@ class BaseSessionFactory(LoggingMixin):
         )
         return self.role_arn
 
-    def _read_credentials_from_connection(self) -> Tuple[Optional[str], Optional[str]]:
+    def _read_credentials_from_connection(self) -> tuple[str | None, str | None]:
         warnings.warn(
             "`BaseSessionFactory._read_credentials_from_connection` method deprecated and will be removed "
             "in a future releases. Please use `BaseSessionFactory.conn.aws_access_key_id` and "
@@ -377,12 +377,12 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
 
     def __init__(
         self,
-        aws_conn_id: Optional[str] = default_conn_name,
-        verify: Optional[Union[bool, str]] = None,
-        region_name: Optional[str] = None,
-        client_type: Optional[str] = None,
-        resource_type: Optional[str] = None,
-        config: Optional[Config] = None,
+        aws_conn_id: str | None = default_conn_name,
+        verify: bool | str | None = None,
+        region_name: str | None = None,
+        client_type: str | None = None,
+        resource_type: str | None = None,
+        config: Config | None = None,
     ) -> None:
         super().__init__()
         self.aws_conn_id = aws_conn_id
@@ -415,21 +415,21 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
         )
 
     @property
-    def region_name(self) -> Optional[str]:
+    def region_name(self) -> str | None:
         """AWS Region Name read-only property."""
         return self.conn_config.region_name
 
     @property
-    def config(self) -> Optional[Config]:
+    def config(self) -> Config | None:
         """Configuration for botocore client read-only property."""
         return self.conn_config.botocore_config
 
     @property
-    def verify(self) -> Optional[Union[bool, str]]:
+    def verify(self) -> bool | str | None:
         """Verify or not SSL certificates boto3 client/resource read-only property."""
         return self.conn_config.verify
 
-    def get_session(self, region_name: Optional[str] = None) -> boto3.session.Session:
+    def get_session(self, region_name: str | None = None) -> boto3.session.Session:
         """Get the underlying boto3.session.Session(region_name=region_name)."""
         return SessionFactory(
             conn=self.conn_config, region_name=region_name, config=self.config
@@ -437,8 +437,8 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
 
     def get_client_type(
         self,
-        region_name: Optional[str] = None,
-        config: Optional[Config] = None,
+        region_name: str | None = None,
+        config: Config | None = None,
     ) -> boto3.client:
         """Get the underlying boto3 client using boto3 session"""
         client_type = self.client_type
@@ -455,8 +455,8 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
 
     def get_resource_type(
         self,
-        region_name: Optional[str] = None,
-        config: Optional[Config] = None,
+        region_name: str | None = None,
+        config: Config | None = None,
     ) -> boto3.resource:
         """Get the underlying boto3 resource using boto3 session"""
         resource_type = self.resource_type
@@ -520,7 +520,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
         # Compat shim
         return self.conn
 
-    def get_credentials(self, region_name: Optional[str] = None) -> ReadOnlyCredentials:
+    def get_credentials(self, region_name: str | None = None) -> ReadOnlyCredentials:
         """
         Get the underlying `botocore.Credentials` object.
 
@@ -536,7 +536,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
             mask_secret(creds.token)
         return creds
 
-    def expand_role(self, role: str, region_name: Optional[str] = None) -> str:
+    def expand_role(self, role: str, region_name: str | None = None) -> str:
         """
         If the IAM role is a role name, get the Amazon Resource Name (ARN) for the role.
         If IAM role is already an IAM role ARN, no change is made.
@@ -586,7 +586,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
 
         return retry_decorator
 
-    def _get_credentials(self, region_name: Optional[str]) -> Tuple[boto3.session.Session, Optional[str]]:
+    def _get_credentials(self, region_name: str | None) -> tuple[boto3.session.Session, str | None]:
         warnings.warn(
             "`AwsGenericHook._get_credentials` method deprecated and will be removed in a future releases. "
             "Please use `AwsGenericHook.get_session` method and "
@@ -598,7 +598,7 @@ class AwsGenericHook(BaseHook, Generic[BaseAwsConnection]):
         return self.get_session(region_name=region_name), self.conn_config.endpoint_url
 
     @staticmethod
-    def get_ui_field_behaviour() -> Dict[str, Any]:
+    def get_ui_field_behaviour() -> dict[str, Any]:
         """Returns custom UI field behaviour for AWS Connection."""
         return {
             "hidden_fields": ["host", "schema", "port"],
@@ -660,7 +660,7 @@ class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]):
     """
 
 
-def resolve_session_factory() -> Type[BaseSessionFactory]:
+def resolve_session_factory() -> type[BaseSessionFactory]:
     """Resolves custom SessionFactory class"""
     clazz = conf.getimport("aws", "session_factory", fallback=None)
     if not clazz:
@@ -676,9 +676,7 @@ def resolve_session_factory() -> Type[BaseSessionFactory]:
 SessionFactory = resolve_session_factory()
 
 
-def _parse_s3_config(
-    config_file_name: str, config_format: Optional[str] = "boto", profile: Optional[str] = None
-):
+def _parse_s3_config(config_file_name: str, config_format: str | None = "boto", profile: str | None = None):
     """For compatibility with airflow.contrib.hooks.aws_hook"""
     from airflow.providers.amazon.aws.utils.connection_wrapper import _parse_s3_config
 
diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py
index ccc617cde7..6f33ea822b 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -24,9 +24,10 @@ A client for AWS Batch services
     - http://boto3.readthedocs.io/en/latest/reference/services/batch.html
     - https://docs.aws.amazon.com/batch/latest/APIReference/Welcome.html
 """
+from __future__ import annotations
+
 from random import uniform
 from time import sleep
-from typing import Dict, List, Optional, Union
 
 import botocore.client
 import botocore.exceptions
@@ -50,7 +51,7 @@ class BatchProtocol(Protocol):
         - http://boto3.readthedocs.io/en/latest/reference/services/batch.html
     """
 
-    def describe_jobs(self, jobs: List[str]) -> Dict:
+    def describe_jobs(self, jobs: list[str]) -> dict:
         """
         Get job descriptions from AWS Batch
 
@@ -93,11 +94,11 @@ class BatchProtocol(Protocol):
         jobName: str,
         jobQueue: str,
         jobDefinition: str,
-        arrayProperties: Dict,
-        parameters: Dict,
-        containerOverrides: Dict,
-        tags: Dict,
-    ) -> Dict:
+        arrayProperties: dict,
+        parameters: dict,
+        containerOverrides: dict,
+        tags: dict,
+    ) -> dict:
         """
         Submit a Batch job
 
@@ -120,7 +121,7 @@ class BatchProtocol(Protocol):
         """
         ...
 
-    def terminate_job(self, jobId: str, reason: str) -> Dict:
+    def terminate_job(self, jobId: str, reason: str) -> dict:
         """
         Terminate a Batch job
 
@@ -198,7 +199,7 @@ class BatchClientHook(AwsBaseHook):
     JOB_QUEUE_INTERMEDIATE_STATUS = ('CREATING', 'UPDATING', 'DELETING')
 
     def __init__(
-        self, *args, max_retries: Optional[int] = None, status_retries: Optional[int] = None, **kwargs
+        self, *args, max_retries: int | None = None, status_retries: int | None = None, **kwargs
     ) -> None:
         # https://github.com/python/mypy/issues/6799 hence type: ignore
         super().__init__(client_type='batch', *args, **kwargs)  # type: ignore
@@ -206,7 +207,7 @@ class BatchClientHook(AwsBaseHook):
         self.status_retries = status_retries or self.STATUS_RETRIES
 
     @property
-    def client(self) -> Union[BatchProtocol, botocore.client.BaseClient]:
+    def client(self) -> BatchProtocol | botocore.client.BaseClient:
         """
         An AWS API client for Batch services.
 
@@ -215,7 +216,7 @@ class BatchClientHook(AwsBaseHook):
         """
         return self.conn
 
-    def terminate_job(self, job_id: str, reason: str) -> Dict:
+    def terminate_job(self, job_id: str, reason: str) -> dict:
         """
         Terminate a Batch job
 
@@ -256,7 +257,7 @@ class BatchClientHook(AwsBaseHook):
 
         raise AirflowException(f"AWS Batch job ({job_id}) has unknown status: {job}")
 
-    def wait_for_job(self, job_id: str, delay: Union[int, float, None] = None) -> None:
+    def wait_for_job(self, job_id: str, delay: int | float | None = None) -> None:
         """
         Wait for Batch job to complete
 
@@ -271,7 +272,7 @@ class BatchClientHook(AwsBaseHook):
         self.poll_for_job_complete(job_id, delay)
         self.log.info("AWS Batch job (%s) has completed", job_id)
 
-    def poll_for_job_running(self, job_id: str, delay: Union[int, float, None] = None) -> None:
+    def poll_for_job_running(self, job_id: str, delay: int | float | None = None) -> None:
         """
         Poll for job running. The status that indicates a job is running or
         already complete are: 'RUNNING'|'SUCCEEDED'|'FAILED'.
@@ -293,7 +294,7 @@ class BatchClientHook(AwsBaseHook):
         running_status = [self.RUNNING_STATE, self.SUCCESS_STATE, self.FAILURE_STATE]
         self.poll_job_status(job_id, running_status)
 
-    def poll_for_job_complete(self, job_id: str, delay: Union[int, float, None] = None) -> None:
+    def poll_for_job_complete(self, job_id: str, delay: int | float | None = None) -> None:
         """
         Poll for job completion. The status that indicates job completion
         are: 'SUCCEEDED'|'FAILED'.
@@ -311,7 +312,7 @@ class BatchClientHook(AwsBaseHook):
         complete_status = [self.SUCCESS_STATE, self.FAILURE_STATE]
         self.poll_job_status(job_id, complete_status)
 
-    def poll_job_status(self, job_id: str, match_status: List[str]) -> bool:
+    def poll_job_status(self, job_id: str, match_status: list[str]) -> bool:
         """
         Poll for job status using an exponential back-off strategy (with max_retries).
 
@@ -353,7 +354,7 @@ class BatchClientHook(AwsBaseHook):
             )
             self.delay(pause)
 
-    def get_job_description(self, job_id: str) -> Dict:
+    def get_job_description(self, job_id: str) -> dict:
         """
         Get job description (using status_retries).
 
@@ -395,7 +396,7 @@ class BatchClientHook(AwsBaseHook):
             self.delay(pause)
 
     @staticmethod
-    def parse_job_description(job_id: str, response: Dict) -> Dict:
+    def parse_job_description(job_id: str, response: dict) -> dict:
         """
         Parse job description to extract description for job_id
 
@@ -415,7 +416,7 @@ class BatchClientHook(AwsBaseHook):
 
         return matching_jobs[0]
 
-    def get_job_awslogs_info(self, job_id: str) -> Optional[Dict[str, str]]:
+    def get_job_awslogs_info(self, job_id: str) -> dict[str, str] | None:
         """
         Parse job description to extract AWS CloudWatch information.
 
@@ -455,9 +456,7 @@ class BatchClientHook(AwsBaseHook):
         }
 
     @staticmethod
-    def add_jitter(
-        delay: Union[int, float], width: Union[int, float] = 1, minima: Union[int, float] = 0
-    ) -> float:
+    def add_jitter(delay: int | float, width: int | float = 1, minima: int | float = 0) -> float:
         """
         Use delay +/- width for random jitter
 
@@ -486,7 +485,7 @@ class BatchClientHook(AwsBaseHook):
         return uniform(lower, upper)
 
     @staticmethod
-    def delay(delay: Union[int, float, None] = None) -> None:
+    def delay(delay: int | float | None = None) -> None:
         """
         Pause execution for ``delay`` seconds.
 
diff --git a/airflow/providers/amazon/aws/hooks/batch_waiters.py b/airflow/providers/amazon/aws/hooks/batch_waiters.py
index b984d3e23f..ccb4f988de 100644
--- a/airflow/providers/amazon/aws/hooks/batch_waiters.py
+++ b/airflow/providers/amazon/aws/hooks/batch_waiters.py
@@ -15,8 +15,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
-
 """
 AWS Batch service waiters
 
@@ -25,12 +23,13 @@ AWS Batch service waiters
     - https://boto3.amazonaws.com/v1/documentation/api/latest/guide/clients.html#waiters
     - https://github.com/boto/botocore/blob/develop/botocore/waiter.py
 """
+from __future__ import annotations
 
 import json
 import sys
 from copy import deepcopy
 from pathlib import Path
-from typing import Dict, List, Optional, Union
+from typing import Dict, Optional
 
 import botocore.client
 import botocore.exceptions
@@ -98,7 +97,7 @@ class BatchWaitersHook(BatchClientHook):
         Override the AWS region in connection (if provided)
     """
 
-    def __init__(self, *args, waiter_config: Optional[Dict] = None, **kwargs) -> None:
+    def __init__(self, *args, waiter_config: dict | None = None, **kwargs) -> None:
 
         super().__init__(*args, **kwargs)
 
@@ -107,7 +106,7 @@ class BatchWaitersHook(BatchClientHook):
         self._waiter_model = botocore.waiter.WaiterModel(self._waiter_config)
 
     @property
-    def default_config(self) -> Dict:
+    def default_config(self) -> dict:
         """
         An immutable default waiter configuration
 
@@ -121,7 +120,7 @@ class BatchWaitersHook(BatchClientHook):
         return deepcopy(self._default_config)  # avoid accidental mutation
 
     @property
-    def waiter_config(self) -> Dict:
+    def waiter_config(self) -> dict:
         """
         An immutable waiter configuration for this instance; a ``deepcopy`` is returned by this
         property. During the init for BatchWaiters, the waiter_config is used to build a
@@ -178,7 +177,7 @@ class BatchWaitersHook(BatchClientHook):
         """
         return botocore.waiter.create_waiter_with_client(waiter_name, self.waiter_model, self.client)
 
-    def list_waiters(self) -> List[str]:
+    def list_waiters(self) -> list[str]:
         """
         List the waiters in a waiter configuration for AWS Batch services.
 
@@ -187,7 +186,7 @@ class BatchWaitersHook(BatchClientHook):
         """
         return self.waiter_model.waiter_names
 
-    def wait_for_job(self, job_id: str, delay: Union[int, float, None] = None) -> None:
+    def wait_for_job(self, job_id: str, delay: int | float | None = None) -> None:
         """
         Wait for Batch job to complete.  This assumes that the ``.waiter_model`` is configured
         using some variation of the ``.default_config`` so that it can generate waiters with the
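
Note that these hunks are deliberately not a blanket removal of typing imports: names still used in ordinary runtime expressions stay put. base_aws.py above keeps Union, for instance, because AwsGenericHook[Union[boto3.client, boto3.resource]] is a base-class subscription that Python evaluates at import time, and PEP 563 only defers annotations. A stdlib-only sketch of the distinction (all names invented):

    from __future__ import annotations

    from typing import Generic, TypeVar, Union

    T = TypeVar("T")


    class GenericHook(Generic[T]):
        """Stand-in for AwsGenericHook; purely illustrative."""


    class ClientOrResourceHook(GenericHook[Union[str, bytes]]):
        # The subscription in the base-class list runs at class-creation time,
        # so typing.Union is still required here on Python < 3.10.
        pass


    def lookup(key: str | bytes) -> str | None:
        # This union, by contrast, lives in an annotation and is never
        # evaluated, so the new syntax is fine even on Python 3.7.
        return None


    # `class Bad(GenericHook[str | bytes]): ...` would raise TypeError before
    # Python 3.10, because the `|` between the types executes immediately.
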
diff --git a/airflow/providers/amazon/aws/hooks/cloud_formation.py b/airflow/providers/amazon/aws/hooks/cloud_formation.py
index 6c17ad81e0..a6606fedfd 100644
--- a/airflow/providers/amazon/aws/hooks/cloud_formation.py
+++ b/airflow/providers/amazon/aws/hooks/cloud_formation.py
@@ -15,9 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains AWS CloudFormation Hook"""
-from typing import Optional, Union
+from __future__ import annotations
 
 from boto3 import client, resource
 from botocore.exceptions import ClientError
@@ -39,7 +38,7 @@ class CloudFormationHook(AwsBaseHook):
     def __init__(self, *args, **kwargs):
         super().__init__(client_type='cloudformation', *args, **kwargs)
 
-    def get_stack_status(self, stack_name: Union[client, resource]) -> Optional[dict]:
+    def get_stack_status(self, stack_name: client | resource) -> dict | None:
         """Get stack status from CloudFormation."""
         self.log.info('Poking for stack %s', stack_name)
 
@@ -63,7 +62,7 @@ class CloudFormationHook(AwsBaseHook):
             cloudformation_parameters['StackName'] = stack_name
         self.get_conn().create_stack(**cloudformation_parameters)
 
-    def delete_stack(self, stack_name: str, cloudformation_parameters: Optional[dict] = None) -> None:
+    def delete_stack(self, stack_name: str, cloudformation_parameters: dict | None = None) -> None:
         """
         Delete stack in CloudFormation.
 
diff --git a/airflow/providers/amazon/aws/hooks/datasync.py b/airflow/providers/amazon/aws/hooks/datasync.py
index 6db8438c49..2c337bf7db 100644
--- a/airflow/providers/amazon/aws/hooks/datasync.py
+++ b/airflow/providers/amazon/aws/hooks/datasync.py
@@ -14,11 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """Interact with AWS DataSync, using the AWS ``boto3`` library."""
+from __future__ import annotations
 
 import time
-from typing import List, Optional
 
 from airflow.exceptions import AirflowBadRequest, AirflowException, AirflowTaskTimeout
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -85,7 +84,7 @@ class DataSyncHook(AwsBaseHook):
 
     def get_location_arns(
         self, location_uri: str, case_sensitive: bool = False, ignore_trailing_slash: bool = True
-    ) -> List[str]:
+    ) -> list[str]:
         """
         Return all LocationArns which match a LocationUri.
 
@@ -262,7 +261,7 @@ class DataSyncHook(AwsBaseHook):
         """
         return self.get_conn().describe_task_execution(TaskExecutionArn=task_execution_arn)
 
-    def get_current_task_execution_arn(self, task_arn: str) -> Optional[str]:
+    def get_current_task_execution_arn(self, task_arn: str) -> str | None:
         """
         Get current TaskExecutionArn (if one exists) for the specified ``task_arn``.
 
diff --git a/airflow/providers/amazon/aws/hooks/dms.py b/airflow/providers/amazon/aws/hooks/dms.py
index a1bd19daf3..b85048f1e4 100644
--- a/airflow/providers/amazon/aws/hooks/dms.py
+++ b/airflow/providers/amazon/aws/hooks/dms.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import json
 from enum import Enum
-from typing import Optional
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -54,9 +55,7 @@ class DmsHook(AwsBaseHook):
 
         return response.get('Marker'), response.get('ReplicationTasks', [])
 
-    def find_replication_tasks_by_arn(
-        self, replication_task_arn: str, without_settings: Optional[bool] = False
-    ):
+    def find_replication_tasks_by_arn(self, replication_task_arn: str, without_settings: bool | None = False):
         """
         Find and describe replication tasks by task ARN
         :param replication_task_arn: Replication task arn
@@ -76,7 +75,7 @@ class DmsHook(AwsBaseHook):
 
         return tasks
 
-    def get_task_status(self, replication_task_arn: str) -> Optional[str]:
+    def get_task_status(self, replication_task_arn: str) -> str | None:
         """
         Retrieve task status.
 
diff --git a/airflow/providers/amazon/aws/hooks/dynamodb.py b/airflow/providers/amazon/aws/hooks/dynamodb.py
index bb0637ae64..52c96e9b02 100644
--- a/airflow/providers/amazon/aws/hooks/dynamodb.py
+++ b/airflow/providers/amazon/aws/hooks/dynamodb.py
@@ -15,10 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-
 """This module contains the AWS DynamoDB hook"""
-from typing import Iterable, List, Optional
+from __future__ import annotations
+
+from typing import Iterable
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -39,7 +39,7 @@ class DynamoDBHook(AwsBaseHook):
     """
 
     def __init__(
-        self, *args, table_keys: Optional[List] = None, table_name: Optional[str] = None, **kwargs
+        self, *args, table_keys: list | None = None, table_name: str | None = None, **kwargs
     ) -> None:
         self.table_keys = table_keys
         self.table_name = table_name
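
With postponed evaluation, the builtin-generic and "X | None" spellings used above parse fine on Python 3.7, because the interpreter never evaluates them at definition time; only code that reifies the annotations (for example typing.get_type_hints) needs the newer runtime support. A small illustration (function name invented):

    from __future__ import annotations

    import typing


    def make_hook(table_keys: list | None = None, table_name: str | None = None) -> dict:
        return {"table_keys": table_keys, "table_name": table_name}


    # Defining and calling the function works on Python 3.7+.
    print(make_hook(["id"], "my_table"))

    # Evaluating the stored annotation strings is another matter: on Python < 3.10
    # the `str | None` expression itself raises TypeError, so get_type_hints()
    # only succeeds here on 3.10+.
    try:
        print(typing.get_type_hints(make_hook))
    except TypeError as err:
        print("annotations could not be evaluated at runtime:", err)
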
diff --git a/airflow/providers/amazon/aws/hooks/ec2.py b/airflow/providers/amazon/aws/hooks/ec2.py
index 96dbaf5410..dadef2d75e 100644
--- a/airflow/providers/amazon/aws/hooks/ec2.py
+++ b/airflow/providers/amazon/aws/hooks/ec2.py
@@ -15,11 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
 
 import functools
 import time
-from typing import List, Optional
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -70,7 +69,7 @@ class EC2Hook(AwsBaseHook):
 
         super().__init__(*args, **kwargs)
 
-    def get_instance(self, instance_id: str, filters: Optional[List] = None):
+    def get_instance(self, instance_id: str, filters: list | None = None):
         """
         Get EC2 instance by id and return it.
 
@@ -121,7 +120,7 @@ class EC2Hook(AwsBaseHook):
         return self.conn.terminate_instances(InstanceIds=instance_ids)
 
     @only_client_type
-    def describe_instances(self, filters: Optional[List] = None, instance_ids: Optional[List] = None):
+    def describe_instances(self, filters: list | None = None, instance_ids: list | None = None):
         """
         Describe EC2 instances, optionally applying filters and selective instance ids
 
@@ -138,7 +137,7 @@ class EC2Hook(AwsBaseHook):
         return self.conn.describe_instances(Filters=filters, InstanceIds=instance_ids)
 
     @only_client_type
-    def get_instances(self, filters: Optional[List] = None, instance_ids: Optional[List] = None) -> list:
+    def get_instances(self, filters: list | None = None, instance_ids: list | None = None) -> list:
         """
         Get list of instance details, optionally applying filters and selective instance ids
 
@@ -153,7 +152,7 @@ class EC2Hook(AwsBaseHook):
         ]
 
     @only_client_type
-    def get_instance_ids(self, filters: Optional[List] = None) -> list:
+    def get_instance_ids(self, filters: list | None = None) -> list:
         """
         Get list of instance ids, optionally applying filters to fetch selective instances
 
diff --git a/airflow/providers/amazon/aws/hooks/ecs.py b/airflow/providers/amazon/aws/hooks/ecs.py
index 9c94bdb8d4..cf24e2b59a 100644
--- a/airflow/providers/amazon/aws/hooks/ecs.py
+++ b/airflow/providers/amazon/aws/hooks/ecs.py
@@ -15,13 +15,15 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import time
 from collections import deque
 from datetime import datetime, timedelta
 from enum import Enum
 from logging import Logger
 from threading import Event, Thread
-from typing import Dict, Generator, Optional
+from typing import Generator
 
 from botocore.exceptions import ClientError, ConnectionClosedError
 from botocore.waiter import Waiter
@@ -129,8 +131,8 @@ class EcsTaskLogFetcher(Thread):
         log_stream_name: str,
         fetch_interval: timedelta,
         logger: Logger,
-        aws_conn_id: Optional[str] = 'aws_default',
-        region_name: Optional[str] = None,
+        aws_conn_id: str | None = 'aws_default',
+        region_name: str | None = None,
     ):
         super().__init__()
         self._event = Event()
@@ -173,7 +175,7 @@ class EcsTaskLogFetcher(Thread):
     def get_last_log_messages(self, number_messages) -> list:
         return [log['message'] for log in deque(self._get_log_events(), maxlen=number_messages)]
 
-    def get_last_log_message(self) -> Optional[str]:
+    def get_last_log_message(self) -> str | None:
         try:
             return self.get_last_log_messages(1)[0]
         except IndexError:
@@ -198,7 +200,7 @@ class EcsProtocol(Protocol):
         - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html
     """
 
-    def run_task(self, **kwargs) -> Dict:
+    def run_task(self, **kwargs) -> dict:
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.run_task"""  # noqa: E501
         ...
 
@@ -206,18 +208,18 @@ class EcsProtocol(Protocol):
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.get_waiter"""  # noqa: E501
         ...
 
-    def describe_tasks(self, cluster: str, tasks) -> Dict:
+    def describe_tasks(self, cluster: str, tasks) -> dict:
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.describe_tasks"""  # noqa: E501
         ...
 
-    def stop_task(self, cluster, task, reason: str) -> Dict:
+    def stop_task(self, cluster, task, reason: str) -> dict:
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.stop_task"""  # noqa: E501
         ...
 
-    def describe_task_definition(self, taskDefinition: str) -> Dict:
+    def describe_task_definition(self, taskDefinition: str) -> dict:
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.describe_task_definition"""  # noqa: E501
         ...
 
-    def list_tasks(self, cluster: str, launchType: str, desiredStatus: str, family: str) -> Dict:
+    def list_tasks(self, cluster: str, launchType: str, desiredStatus: str, family: str) -> dict:
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.list_tasks"""  # noqa: E501
         ...
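
The "..."-bodied methods above belong to a structural-typing stub (EcsProtocol) describing just the slice of the boto3 ECS client that the hook calls; no runtime inheritance is involved. A toy version of the idea using typing.Protocol directly (Python 3.8+ for that import; all names below are invented):

    from __future__ import annotations

    from typing import Protocol


    class RunTaskProtocol(Protocol):
        def run_task(self, **kwargs) -> dict:
            ...


    class FakeEcsClient:
        # No inheritance from the protocol: a matching signature is all a type
        # checker needs, which is the same way the stub types a boto3 client.
        def run_task(self, **kwargs) -> dict:
            return {"tasks": [], "failures": [], "kwargs": kwargs}


    def start(client: RunTaskProtocol) -> dict:
        return client.run_task(cluster="demo", launchType="FARGATE")


    print(start(FakeEcsClient()))
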
diff --git a/airflow/providers/amazon/aws/hooks/eks.py b/airflow/providers/amazon/aws/hooks/eks.py
index 3157b2dbf8..f3e0a8d130 100644
--- a/airflow/providers/amazon/aws/hooks/eks.py
+++ b/airflow/providers/amazon/aws/hooks/eks.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """Interact with Amazon EKS, using the boto3 library."""
+from __future__ import annotations
+
 import base64
 import json
 import sys
@@ -23,7 +24,7 @@ import tempfile
 from contextlib import contextmanager
 from enum import Enum
 from functools import partial
-from typing import Callable, Dict, Generator, List, Optional
+from typing import Callable, Generator
 
 from botocore.exceptions import ClientError
 from botocore.signers import RequestSigner
@@ -95,9 +96,9 @@ class EksHook(AwsBaseHook):
         self,
         name: str,
         roleArn: str,
-        resourcesVpcConfig: Dict,
+        resourcesVpcConfig: dict,
         **kwargs,
-    ) -> Dict:
+    ) -> dict:
         """
         Creates an Amazon EKS control plane.
 
@@ -122,12 +123,12 @@ class EksHook(AwsBaseHook):
         self,
         clusterName: str,
         nodegroupName: str,
-        subnets: List[str],
-        nodeRole: Optional[str],
+        subnets: list[str],
+        nodeRole: str | None,
         *,
-        tags: Optional[Dict] = None,
+        tags: dict | None = None,
         **kwargs,
-    ) -> Dict:
+    ) -> dict:
         """
         Creates an Amazon EKS managed node group for an Amazon EKS Cluster.
 
@@ -169,11 +170,11 @@ class EksHook(AwsBaseHook):
     def create_fargate_profile(
         self,
         clusterName: str,
-        fargateProfileName: Optional[str],
-        podExecutionRoleArn: Optional[str],
-        selectors: List,
+        fargateProfileName: str | None,
+        podExecutionRoleArn: str | None,
+        selectors: list,
         **kwargs,
-    ) -> Dict:
+    ) -> dict:
         """
         Creates an AWS Fargate profile for an Amazon EKS cluster.
 
@@ -203,7 +204,7 @@ class EksHook(AwsBaseHook):
         )
         return response
 
-    def delete_cluster(self, name: str) -> Dict:
+    def delete_cluster(self, name: str) -> dict:
         """
         Deletes the Amazon EKS Cluster control plane.
 
@@ -219,7 +220,7 @@ class EksHook(AwsBaseHook):
         self.log.info("Deleted Amazon EKS cluster with the name %s.", response.get('cluster').get('name'))
         return response
 
-    def delete_nodegroup(self, clusterName: str, nodegroupName: str) -> Dict:
+    def delete_nodegroup(self, clusterName: str, nodegroupName: str) -> dict:
         """
         Deletes an Amazon EKS managed node group from a specified cluster.
 
@@ -240,7 +241,7 @@ class EksHook(AwsBaseHook):
         )
         return response
 
-    def delete_fargate_profile(self, clusterName: str, fargateProfileName: str) -> Dict:
+    def delete_fargate_profile(self, clusterName: str, fargateProfileName: str) -> dict:
         """
         Deletes an AWS Fargate profile from a specified Amazon EKS cluster.
 
@@ -263,7 +264,7 @@ class EksHook(AwsBaseHook):
         )
         return response
 
-    def describe_cluster(self, name: str, verbose: bool = False) -> Dict:
+    def describe_cluster(self, name: str, verbose: bool = False) -> dict:
         """
         Returns descriptive information about an Amazon EKS Cluster.
 
@@ -285,7 +286,7 @@ class EksHook(AwsBaseHook):
             self.log.info("Amazon EKS cluster details: %s", json.dumps(cluster_data, cls=AirflowJsonEncoder))
         return response
 
-    def describe_nodegroup(self, clusterName: str, nodegroupName: str, verbose: bool = False) -> Dict:
+    def describe_nodegroup(self, clusterName: str, nodegroupName: str, verbose: bool = False) -> dict:
         """
         Returns descriptive information about an Amazon EKS managed node group.
 
@@ -315,7 +316,7 @@ class EksHook(AwsBaseHook):
 
     def describe_fargate_profile(
         self, clusterName: str, fargateProfileName: str, verbose: bool = False
-    ) -> Dict:
+    ) -> dict:
         """
         Returns descriptive information about an AWS Fargate profile.
 
@@ -413,7 +414,7 @@ class EksHook(AwsBaseHook):
     def list_clusters(
         self,
         verbose: bool = False,
-    ) -> List:
+    ) -> list:
         """
         Lists all Amazon EKS Clusters in your AWS account.
 
@@ -431,7 +432,7 @@ class EksHook(AwsBaseHook):
         self,
         clusterName: str,
         verbose: bool = False,
-    ) -> List:
+    ) -> list:
         """
         Lists all Amazon EKS managed node groups associated with the specified cluster.
 
@@ -450,7 +451,7 @@ class EksHook(AwsBaseHook):
         self,
         clusterName: str,
         verbose: bool = False,
-    ) -> List:
+    ) -> list:
         """
         Lists all AWS Fargate profiles associated with the specified cluster.
 
@@ -467,7 +468,7 @@ class EksHook(AwsBaseHook):
             api_call=list_fargate_profiles_call, response_key="fargateProfileNames", verbose=verbose
         )
 
-    def _list_all(self, api_call: Callable, response_key: str, verbose: bool) -> List:
+    def _list_all(self, api_call: Callable, response_key: str, verbose: bool) -> list:
         """
         Repeatedly calls a provided boto3 API Callable and collates the responses into a List.
 
@@ -478,7 +479,7 @@ class EksHook(AwsBaseHook):
         :return: A List of the combined results of the provided API call.
         :rtype: List
         """
-        name_collection: List = []
+        name_collection: list = []
         token = DEFAULT_PAGINATION_TOKEN
 
         while token is not None:
@@ -497,7 +498,7 @@ class EksHook(AwsBaseHook):
     def generate_config_file(
         self,
         eks_cluster_name: str,
-        pod_namespace: Optional[str],
+        pod_namespace: str | None,
     ) -> Generator[str, None, None]:
         """
         Writes the kubeconfig file given an EKS Cluster.
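
The EKS hook changes above are representative of the whole commit: once "from __future__ import annotations" is in effect, annotations are stored as strings instead of being evaluated at import time, so built-in generics (dict, list) and the "X | None" union syntax work even on Python versions older than 3.9/3.10. A minimal sketch of the pattern, using a made-up class purely for illustration:

    from __future__ import annotations


    class ExampleHook:
        # Postponed evaluation: "dict | None" is never evaluated at runtime,
        # so this parses and runs on Python 3.7+ without typing.Optional/Dict.
        def create_resource(self, name: str, tags: dict | None = None) -> dict:
            return {"name": name, "tags": tags or {}}
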
diff --git a/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py b/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py
index 47af28845d..0347281799 100644
--- a/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py
+++ b/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py
@@ -15,8 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from time import sleep
-from typing import Optional
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -104,9 +105,9 @@ class ElastiCacheReplicationGroupHook(AwsBaseHook):
     def wait_for_availability(
         self,
         replication_group_id: str,
-        initial_sleep_time: Optional[float] = None,
-        exponential_back_off_factor: Optional[float] = None,
-        max_retries: Optional[int] = None,
+        initial_sleep_time: float | None = None,
+        exponential_back_off_factor: float | None = None,
+        max_retries: int | None = None,
     ):
         """
         Check if replication group is available or not by performing a describe over it
@@ -159,9 +160,9 @@ class ElastiCacheReplicationGroupHook(AwsBaseHook):
     def wait_for_deletion(
         self,
         replication_group_id: str,
-        initial_sleep_time: Optional[float] = None,
-        exponential_back_off_factor: Optional[float] = None,
-        max_retries: Optional[int] = None,
+        initial_sleep_time: float | None = None,
+        exponential_back_off_factor: float | None = None,
+        max_retries: int | None = None,
     ):
         """
         Helper for deleting a replication group ensuring it is either deleted or can't be deleted
@@ -235,9 +236,9 @@ class ElastiCacheReplicationGroupHook(AwsBaseHook):
     def ensure_delete_replication_group(
         self,
         replication_group_id: str,
-        initial_sleep_time: Optional[float] = None,
-        exponential_back_off_factor: Optional[float] = None,
-        max_retries: Optional[int] = None,
+        initial_sleep_time: float | None = None,
+        exponential_back_off_factor: float | None = None,
+        max_retries: int | None = None,
     ):
         """
         Delete a replication group ensuring it is either deleted or can't be deleted
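
All three ElastiCache helpers shown here (wait_for_availability, wait_for_deletion, ensure_delete_replication_group) take the same tuning knobs: initial_sleep_time, exponential_back_off_factor and max_retries. A stand-alone sketch of that exponential back-off loop, not the hook's actual code (the default values below are arbitrary):

    import time


    def wait_with_backoff(check, initial_sleep_time=30.0, exponential_back_off_factor=2.0, max_retries=10):
        # Call check() until it reports success, sleeping longer after each failed attempt.
        sleep_time = initial_sleep_time
        for _ in range(max_retries):
            if check():
                return True
            time.sleep(sleep_time)
            sleep_time *= exponential_back_off_factor
        return False
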
diff --git a/airflow/providers/amazon/aws/hooks/emr.py b/airflow/providers/amazon/aws/hooks/emr.py
index 8c86edf0f8..ebb2a79ef8 100644
--- a/airflow/providers/amazon/aws/hooks/emr.py
+++ b/airflow/providers/amazon/aws/hooks/emr.py
@@ -15,9 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import warnings
 from time import sleep
-from typing import Any, Callable, Dict, List, Optional, Set
+from typing import Any, Callable
 
 from botocore.exceptions import ClientError
 
@@ -48,7 +50,7 @@ class EmrHook(AwsBaseHook):
         kwargs["client_type"] = "emr"
         super().__init__(*args, **kwargs)
 
-    def get_cluster_id_by_name(self, emr_cluster_name: str, cluster_states: List[str]) -> Optional[str]:
+    def get_cluster_id_by_name(self, emr_cluster_name: str, cluster_states: list[str]) -> str | None:
         """
         Fetch the id of the EMR cluster with the given name and (optional) states.
         Will return a value only if a single id is found.
@@ -73,7 +75,7 @@ class EmrHook(AwsBaseHook):
             self.log.info('No cluster found for name %s', emr_cluster_name)
             return None
 
-    def create_job_flow(self, job_flow_overrides: Dict[str, Any]) -> Dict[str, Any]:
+    def create_job_flow(self, job_flow_overrides: dict[str, Any]) -> dict[str, Any]:
         """
         Creates a job flow using the config from the EMR connection.
         Keys of the json extra hash may have the arguments of the boto3
@@ -116,10 +118,10 @@ class EmrServerlessHook(AwsBaseHook):
     def waiter(
         self,
         get_state_callable: Callable,
-        get_state_args: Dict,
-        parse_response: List,
-        desired_state: Set,
-        failure_states: Set,
+        get_state_args: dict,
+        parse_response: list,
+        desired_state: set,
+        failure_states: set,
         object_type: str,
         action: str,
         countdown: int = 25 * 60,
@@ -194,7 +196,7 @@ class EmrContainerHook(AwsBaseHook):
         "CANCEL_PENDING",
     )
 
-    def __init__(self, *args: Any, virtual_cluster_id: Optional[str] = None, **kwargs: Any) -> None:
+    def __init__(self, *args: Any, virtual_cluster_id: str | None = None, **kwargs: Any) -> None:
         super().__init__(client_type="emr-containers", *args, **kwargs)  # type: ignore
         self.virtual_cluster_id = virtual_cluster_id
 
@@ -203,7 +205,7 @@ class EmrContainerHook(AwsBaseHook):
         virtual_cluster_name: str,
         eks_cluster_name: str,
         eks_namespace: str,
-        tags: Optional[dict] = None,
+        tags: dict | None = None,
     ) -> str:
         response = self.conn.create_virtual_cluster(
             name=virtual_cluster_name,
@@ -230,9 +232,9 @@ class EmrContainerHook(AwsBaseHook):
         execution_role_arn: str,
         release_label: str,
         job_driver: dict,
-        configuration_overrides: Optional[dict] = None,
-        client_request_token: Optional[str] = None,
-        tags: Optional[dict] = None,
+        configuration_overrides: dict | None = None,
+        client_request_token: str | None = None,
+        tags: dict | None = None,
     ) -> str:
         """
         Submit a job to the EMR Containers API and return the job ID.
@@ -275,7 +277,7 @@ class EmrContainerHook(AwsBaseHook):
             )
             return response['id']
 
-    def get_job_failure_reason(self, job_id: str) -> Optional[str]:
+    def get_job_failure_reason(self, job_id: str) -> str | None:
         """
         Fetch the reason for a job failure (e.g. error message). Returns None or reason string.
 
@@ -300,7 +302,7 @@ class EmrContainerHook(AwsBaseHook):
 
         return reason
 
-    def check_query_status(self, job_id: str) -> Optional[str]:
+    def check_query_status(self, job_id: str) -> str | None:
         """
         Fetch the status of submitted job run. Returns None or one of valid query states.
         See: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/emr-containers.html#EMRContainers.Client.describe_job_run  # noqa: E501
@@ -324,10 +326,10 @@ class EmrContainerHook(AwsBaseHook):
     def poll_query_status(
         self,
         job_id: str,
-        max_tries: Optional[int] = None,
+        max_tries: int | None = None,
         poll_interval: int = 30,
-        max_polling_attempts: Optional[int] = None,
-    ) -> Optional[str]:
+        max_polling_attempts: int | None = None,
+    ) -> str | None:
         """
         Poll the status of submitted job run until query state reaches final state.
         Returns one of the final states.
@@ -372,7 +374,7 @@ class EmrContainerHook(AwsBaseHook):
             sleep(poll_interval)
         return final_query_state
 
-    def stop_query(self, job_id: str) -> Dict:
+    def stop_query(self, job_id: str) -> dict:
         """
         Cancel the submitted job_run
 
diff --git a/airflow/providers/amazon/aws/hooks/glacier.py b/airflow/providers/amazon/aws/hooks/glacier.py
index 00c4b884ae..4f68559d46 100644
--- a/airflow/providers/amazon/aws/hooks/glacier.py
+++ b/airflow/providers/amazon/aws/hooks/glacier.py
@@ -15,9 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-
-from typing import Any, Dict
+from typing import Any
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -29,7 +29,7 @@ class GlacierHook(AwsBaseHook):
         super().__init__(client_type="glacier")
         self.aws_conn_id = aws_conn_id
 
-    def retrieve_inventory(self, vault_name: str) -> Dict[str, Any]:
+    def retrieve_inventory(self, vault_name: str) -> dict[str, Any]:
         """
         Initiate an Amazon Glacier inventory-retrieval job
 
@@ -42,7 +42,7 @@ class GlacierHook(AwsBaseHook):
         self.log.info("Retrieval Job ID: %s", response["jobId"])
         return response
 
-    def retrieve_inventory_results(self, vault_name: str, job_id: str) -> Dict[str, Any]:
+    def retrieve_inventory_results(self, vault_name: str, job_id: str) -> dict[str, Any]:
         """
         Retrieve the results of an Amazon Glacier inventory-retrieval job
 
@@ -53,7 +53,7 @@ class GlacierHook(AwsBaseHook):
         response = self.get_conn().get_job_output(vaultName=vault_name, jobId=job_id)
         return response
 
-    def describe_job(self, vault_name: str, job_id: str) -> Dict[str, Any]:
+    def describe_job(self, vault_name: str, job_id: str) -> dict[str, Any]:
         """
         Retrieve the status of an Amazon S3 Glacier job, such as an
         inventory-retrieval job
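
The three Glacier methods are typically chained: retrieve_inventory starts an inventory-retrieval job, describe_job reports on it, and retrieve_inventory_results fetches the output once the job has completed. A hedged usage sketch; the vault name is a placeholder, and the "jobId"/"Completed" keys come from the underlying boto3 responses:

    from airflow.providers.amazon.aws.hooks.glacier import GlacierHook

    hook = GlacierHook(aws_conn_id="aws_default")
    job = hook.retrieve_inventory(vault_name="example-vault")       # response contains "jobId"
    status = hook.describe_job(vault_name="example-vault", job_id=job["jobId"])
    if status.get("Completed"):
        inventory = hook.retrieve_inventory_results(vault_name="example-vault", job_id=job["jobId"])
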
diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/airflow/providers/amazon/aws/hooks/glue.py
index adbe9626e6..7cbf08c864 100644
--- a/airflow/providers/amazon/aws/hooks/glue.py
+++ b/airflow/providers/amazon/aws/hooks/glue.py
@@ -15,9 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import time
-from typing import Dict, List, Optional
 
 import boto3
 
@@ -52,15 +52,15 @@ class GlueJobHook(AwsBaseHook):
 
     def __init__(
         self,
-        s3_bucket: Optional[str] = None,
-        job_name: Optional[str] = None,
-        desc: Optional[str] = None,
+        s3_bucket: str | None = None,
+        job_name: str | None = None,
+        desc: str | None = None,
         concurrent_run_limit: int = 1,
-        script_location: Optional[str] = None,
+        script_location: str | None = None,
         retry_limit: int = 0,
-        num_of_dpus: Optional[int] = None,
-        iam_role_name: Optional[str] = None,
-        create_job_kwargs: Optional[dict] = None,
+        num_of_dpus: int | None = None,
+        iam_role_name: str | None = None,
+        create_job_kwargs: dict | None = None,
         *args,
         **kwargs,
     ):
@@ -92,12 +92,12 @@ class GlueJobHook(AwsBaseHook):
         kwargs['client_type'] = 'glue'
         super().__init__(*args, **kwargs)
 
-    def list_jobs(self) -> List:
+    def list_jobs(self) -> list:
         """:return: Lists of Jobs"""
         conn = self.get_conn()
         return conn.get_jobs()
 
-    def get_iam_execution_role(self) -> Dict:
+    def get_iam_execution_role(self) -> dict:
         """:return: iam role for job execution"""
         try:
             iam_client = self.get_session(region_name=self.region_name).client(
@@ -112,9 +112,9 @@ class GlueJobHook(AwsBaseHook):
 
     def initialize_job(
         self,
-        script_arguments: Optional[dict] = None,
-        run_kwargs: Optional[dict] = None,
-    ) -> Dict[str, str]:
+        script_arguments: dict | None = None,
+        run_kwargs: dict | None = None,
+    ) -> dict[str, str]:
         """
         Initializes connection with AWS Glue
         to run job
@@ -149,8 +149,8 @@ class GlueJobHook(AwsBaseHook):
         job_name: str,
         run_id: str,
         job_failed: bool = False,
-        next_token: Optional[str] = None,
-    ) -> Optional[str]:
+        next_token: str | None = None,
+    ) -> str | None:
         """Prints the batch of logs to the Airflow task log and returns nextToken."""
         log_client = boto3.client('logs')
         response = {}
@@ -189,7 +189,7 @@ class GlueJobHook(AwsBaseHook):
         # In that case, check the same token again next pass.
         return response.get('nextToken') or next_token
 
-    def job_completion(self, job_name: str, run_id: str, verbose: bool = False) -> Dict[str, str]:
+    def job_completion(self, job_name: str, run_id: str, verbose: bool = False) -> dict[str, str]:
         """
         Waits until the Glue job with job_name completes or
         fails and returns the final state if finished.
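
Taken together, the GlueJobHook flow is: initialize_job submits (or creates and submits) a run, and job_completion blocks until that run reaches a final state. A usage sketch with placeholder names; the "JobRunId" key is assumed to be passed through from Glue's start_job_run response:

    from airflow.providers.amazon.aws.hooks.glue import GlueJobHook

    hook = GlueJobHook(
        job_name="example-glue-job",                           # placeholder
        s3_bucket="example-bucket",                            # placeholder
        iam_role_name="example-glue-role",                     # placeholder
        script_location="s3://example-bucket/scripts/etl.py",  # placeholder
    )
    run = hook.initialize_job(script_arguments={"--input": "s3://example-bucket/input/"})
    final_state = hook.job_completion(job_name="example-glue-job", run_id=run["JobRunId"], verbose=True)
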
diff --git a/airflow/providers/amazon/aws/hooks/glue_catalog.py b/airflow/providers/amazon/aws/hooks/glue_catalog.py
index c02d1ed6d3..d2bc034e45 100644
--- a/airflow/providers/amazon/aws/hooks/glue_catalog.py
+++ b/airflow/providers/amazon/aws/hooks/glue_catalog.py
@@ -15,9 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains AWS Glue Catalog Hook"""
-from typing import Dict, List, Optional, Set
+from __future__ import annotations
 
 from botocore.exceptions import ClientError
 
@@ -44,9 +43,9 @@ class GlueCatalogHook(AwsBaseHook):
         database_name: str,
         table_name: str,
         expression: str = '',
-        page_size: Optional[int] = None,
-        max_items: Optional[int] = None,
-    ) -> Set[tuple]:
+        page_size: int | None = None,
+        max_items: int | None = None,
+    ) -> set[tuple]:
         """
         Retrieves the partition values for a table.
 
@@ -125,7 +124,7 @@ class GlueCatalogHook(AwsBaseHook):
 
         return table['StorageDescriptor']['Location']
 
-    def get_partition(self, database_name: str, table_name: str, partition_values: List[str]) -> Dict:
+    def get_partition(self, database_name: str, table_name: str, partition_values: list[str]) -> dict:
         """
         Gets a Partition
 
@@ -152,7 +151,7 @@ class GlueCatalogHook(AwsBaseHook):
             self.log.error("Client error: %s", e)
             raise AirflowException("AWS request failed, check logs for more info")
 
-    def create_partition(self, database_name: str, table_name: str, partition_input: Dict) -> Dict:
+    def create_partition(self, database_name: str, table_name: str, partition_input: dict) -> dict:
         """
         Creates a new Partition
 
diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py b/airflow/providers/amazon/aws/hooks/glue_crawler.py
index 03a7f926f6..b8009aecb1 100644
--- a/airflow/providers/amazon/aws/hooks/glue_crawler.py
+++ b/airflow/providers/amazon/aws/hooks/glue_crawler.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from time import sleep
 
 from airflow.compat.functools import cached_property
diff --git a/airflow/providers/amazon/aws/hooks/kinesis.py b/airflow/providers/amazon/aws/hooks/kinesis.py
index 7e40016572..71fe675466 100644
--- a/airflow/providers/amazon/aws/hooks/kinesis.py
+++ b/airflow/providers/amazon/aws/hooks/kinesis.py
@@ -15,8 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains AWS Firehose hook"""
+from __future__ import annotations
+
 from typing import Iterable
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
diff --git a/airflow/providers/amazon/aws/hooks/lambda_function.py b/airflow/providers/amazon/aws/hooks/lambda_function.py
index edf5fdc50b..2919d37701 100644
--- a/airflow/providers/amazon/aws/hooks/lambda_function.py
+++ b/airflow/providers/amazon/aws/hooks/lambda_function.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains AWS Lambda hook"""
-from typing import Any, List, Optional
+from __future__ import annotations
+
+from typing import Any
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -50,11 +51,11 @@ class LambdaHook(AwsBaseHook):
         self,
         *,
         function_name: str,
-        invocation_type: Optional[str] = None,
-        log_type: Optional[str] = None,
-        client_context: Optional[str] = None,
-        payload: Optional[str] = None,
-        qualifier: Optional[str] = None,
+        invocation_type: str | None = None,
+        log_type: str | None = None,
+        client_context: str | None = None,
+        payload: str | None = None,
+        qualifier: str | None = None,
     ):
         """Invoke Lambda Function. Refer to the boto3 documentation for more info."""
         invoke_args = {
@@ -75,22 +76,22 @@ class LambdaHook(AwsBaseHook):
         role: str,
         handler: str,
         code: dict,
-        description: Optional[str] = None,
-        timeout: Optional[int] = None,
-        memory_size: Optional[int] = None,
-        publish: Optional[bool] = None,
-        vpc_config: Optional[Any] = None,
-        package_type: Optional[str] = None,
-        dead_letter_config: Optional[Any] = None,
-        environment: Optional[Any] = None,
-        kms_key_arn: Optional[str] = None,
-        tracing_config: Optional[Any] = None,
-        tags: Optional[Any] = None,
-        layers: Optional[list] = None,
-        file_system_configs: Optional[List[Any]] = None,
-        image_config: Optional[Any] = None,
-        code_signing_config_arn: Optional[str] = None,
-        architectures: Optional[List[str]] = None,
+        description: str | None = None,
+        timeout: int | None = None,
+        memory_size: int | None = None,
+        publish: bool | None = None,
+        vpc_config: Any | None = None,
+        package_type: str | None = None,
+        dead_letter_config: Any | None = None,
+        environment: Any | None = None,
+        kms_key_arn: str | None = None,
+        tracing_config: Any | None = None,
+        tags: Any | None = None,
+        layers: list | None = None,
+        file_system_configs: list[Any] | None = None,
+        image_config: Any | None = None,
+        code_signing_config_arn: str | None = None,
+        architectures: list[str] | None = None,
     ) -> dict:
         """Create a Lambda Function"""
         create_function_args = {
diff --git a/airflow/providers/amazon/aws/hooks/logs.py b/airflow/providers/amazon/aws/hooks/logs.py
index 1c5e5e62fb..41ab239c1d 100644
--- a/airflow/providers/amazon/aws/hooks/logs.py
+++ b/airflow/providers/amazon/aws/hooks/logs.py
@@ -15,12 +15,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """
 This module contains a hook (AwsLogsHook) with some very basic
 functionality for interacting with AWS CloudWatch.
 """
-from typing import Dict, Generator, Optional
+from __future__ import annotations
+
+from typing import Generator
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -68,7 +69,7 @@ class AwsLogsHook(AwsBaseHook):
         next_token = None
         while True:
             if next_token is not None:
-                token_arg: Optional[Dict[str, str]] = {'nextToken': next_token}
+                token_arg: dict[str, str] | None = {'nextToken': next_token}
             else:
                 token_arg = {}
 
diff --git a/airflow/providers/amazon/aws/hooks/quicksight.py b/airflow/providers/amazon/aws/hooks/quicksight.py
index 2058661d0a..fb553e3b43 100644
--- a/airflow/providers/amazon/aws/hooks/quicksight.py
+++ b/airflow/providers/amazon/aws/hooks/quicksight.py
@@ -15,6 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import time
 
diff --git a/airflow/providers/amazon/aws/hooks/rds.py b/airflow/providers/amazon/aws/hooks/rds.py
index 45fc99bd57..ca7b9e4ea9 100644
--- a/airflow/providers/amazon/aws/hooks/rds.py
+++ b/airflow/providers/amazon/aws/hooks/rds.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """Interact with AWS RDS."""
+from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
diff --git a/airflow/providers/amazon/aws/hooks/redshift_cluster.py b/airflow/providers/amazon/aws/hooks/redshift_cluster.py
index aaaa803ae4..be5d608808 100644
--- a/airflow/providers/amazon/aws/hooks/redshift_cluster.py
+++ b/airflow/providers/amazon/aws/hooks/redshift_cluster.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import Any, Dict, List, Optional
+from typing import Any
 
 from botocore.exceptions import ClientError
 
@@ -45,8 +46,8 @@ class RedshiftHook(AwsBaseHook):
         node_type: str,
         master_username: str,
         master_user_password: str,
-        params: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        params: dict[str, Any],
+    ) -> dict[str, Any]:
         """
         Creates a new cluster with the specified parameters
 
@@ -92,7 +93,7 @@ class RedshiftHook(AwsBaseHook):
         self,
         cluster_identifier: str,
         skip_final_cluster_snapshot: bool = True,
-        final_cluster_snapshot_identifier: Optional[str] = None,
+        final_cluster_snapshot_identifier: str | None = None,
     ):
         """
         Delete a cluster and optionally create a snapshot
@@ -110,7 +111,7 @@ class RedshiftHook(AwsBaseHook):
         )
         return response['Cluster'] if response['Cluster'] else None
 
-    def describe_cluster_snapshots(self, cluster_identifier: str) -> Optional[List[str]]:
+    def describe_cluster_snapshots(self, cluster_identifier: str) -> list[str] | None:
         """
         Gets a list of snapshots for a cluster
 
diff --git a/airflow/providers/amazon/aws/hooks/redshift_data.py b/airflow/providers/amazon/aws/hooks/redshift_data.py
index e9a154368a..59a0380df3 100644
--- a/airflow/providers/amazon/aws/hooks/redshift_data.py
+++ b/airflow/providers/amazon/aws/hooks/redshift_data.py
@@ -15,6 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
diff --git a/airflow/providers/amazon/aws/hooks/redshift_sql.py b/airflow/providers/amazon/aws/hooks/redshift_sql.py
index 304e5cb898..608899a914 100644
--- a/airflow/providers/amazon/aws/hooks/redshift_sql.py
+++ b/airflow/providers/amazon/aws/hooks/redshift_sql.py
@@ -14,8 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-from typing import Dict, List, Optional, Union
+from __future__ import annotations
 
 import redshift_connector
 from redshift_connector import Connection as RedshiftConnection
@@ -46,7 +45,7 @@ class RedshiftSQLHook(DbApiHook):
     supports_autocommit = True
 
     @staticmethod
-    def get_ui_field_behavior() -> Dict:
+    def get_ui_field_behavior() -> dict:
         """Returns custom field behavior"""
         return {
             "hidden_fields": [],
@@ -57,11 +56,11 @@ class RedshiftSQLHook(DbApiHook):
     def conn(self):
         return self.get_connection(self.redshift_conn_id)  # type: ignore[attr-defined]
 
-    def _get_conn_params(self) -> Dict[str, Union[str, int]]:
+    def _get_conn_params(self) -> dict[str, str | int]:
         """Helper method to retrieve connection args"""
         conn = self.conn
 
-        conn_params: Dict[str, Union[str, int]] = {}
+        conn_params: dict[str, str | int] = {}
 
         if conn.login:
             conn_params['user'] = conn.login
@@ -98,7 +97,7 @@ class RedshiftSQLHook(DbApiHook):
 
         return create_engine(self.get_uri(), **engine_kwargs)
 
-    def get_table_primary_key(self, table: str, schema: Optional[str] = "public") -> Optional[List[str]]:
+    def get_table_primary_key(self, table: str, schema: str | None = "public") -> list[str] | None:
         """
         Helper method that returns the table primary key
         :param table: Name of the target table
@@ -124,5 +123,5 @@ class RedshiftSQLHook(DbApiHook):
         """Returns a redshift_connector.Connection object"""
         conn_params = self._get_conn_params()
         conn_kwargs_dejson = self.conn.extra_dejson
-        conn_kwargs: Dict = {**conn_params, **conn_kwargs_dejson}
+        conn_kwargs: dict = {**conn_params, **conn_kwargs_dejson}
         return redshift_connector.connect(**conn_kwargs)
diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py
index 0ce3296608..48b4c6e060 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -15,9 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
-
 """Interact with AWS S3, using the boto3 library."""
+from __future__ import annotations
+
 import fnmatch
 import gzip as gz
 import io
@@ -30,7 +30,7 @@ from inspect import signature
 from io import BytesIO
 from pathlib import Path
 from tempfile import NamedTemporaryFile
-from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union, cast
+from typing import Any, Callable, List, TypeVar, cast
 from urllib.parse import urlparse
 
 from boto3.s3.transfer import S3Transfer, TransferConfig
@@ -119,9 +119,9 @@ class S3Hook(AwsBaseHook):
 
     def __init__(
         self,
-        aws_conn_id: Optional[str] = AwsBaseHook.default_conn_name,
-        transfer_config_args: Optional[Dict] = None,
-        extra_args: Optional[Dict] = None,
+        aws_conn_id: str | None = AwsBaseHook.default_conn_name,
+        transfer_config_args: dict | None = None,
+        extra_args: dict | None = None,
         *args,
         **kwargs,
     ) -> None:
@@ -144,7 +144,7 @@ class S3Hook(AwsBaseHook):
         return deepcopy(self._extra_args)
 
     @staticmethod
-    def parse_s3_url(s3url: str) -> Tuple[str, str]:
+    def parse_s3_url(s3url: str) -> tuple[str, str]:
         """
         Parses the S3 Url into a bucket name and key.
 
@@ -164,8 +164,8 @@ class S3Hook(AwsBaseHook):
 
     @staticmethod
     def get_s3_bucket_key(
-        bucket: Optional[str], key: str, bucket_param_name: str, key_param_name: str
-    ) -> Tuple[str, str]:
+        bucket: str | None, key: str, bucket_param_name: str, key_param_name: str
+    ) -> tuple[str, str]:
         """
         Get the S3 bucket name and key from either:
             - bucket name and key. Return the info as it is after checking `key` is a relative path
@@ -191,7 +191,7 @@ class S3Hook(AwsBaseHook):
         return bucket, key
 
     @provide_bucket_name
-    def check_for_bucket(self, bucket_name: Optional[str] = None) -> bool:
+    def check_for_bucket(self, bucket_name: str | None = None) -> bool:
         """
         Check if bucket_name exists.
 
@@ -219,7 +219,7 @@ class S3Hook(AwsBaseHook):
             return False
 
     @provide_bucket_name
-    def get_bucket(self, bucket_name: Optional[str] = None) -> object:
+    def get_bucket(self, bucket_name: str | None = None) -> object:
         """
         Returns a boto3.S3.Bucket object
 
@@ -236,7 +236,7 @@ class S3Hook(AwsBaseHook):
         return s3_resource.Bucket(bucket_name)
 
     @provide_bucket_name
-    def create_bucket(self, bucket_name: Optional[str] = None, region_name: Optional[str] = None) -> None:
+    def create_bucket(self, bucket_name: str | None = None, region_name: str | None = None) -> None:
         """
         Creates an Amazon S3 bucket.
 
@@ -259,7 +259,7 @@ class S3Hook(AwsBaseHook):
             )
 
     @provide_bucket_name
-    def check_for_prefix(self, prefix: str, delimiter: str, bucket_name: Optional[str] = None) -> bool:
+    def check_for_prefix(self, prefix: str, delimiter: str, bucket_name: str | None = None) -> bool:
         """
         Checks that a prefix exists in a bucket
 
@@ -278,11 +278,11 @@ class S3Hook(AwsBaseHook):
     @provide_bucket_name
     def list_prefixes(
         self,
-        bucket_name: Optional[str] = None,
-        prefix: Optional[str] = None,
-        delimiter: Optional[str] = None,
-        page_size: Optional[int] = None,
-        max_items: Optional[int] = None,
+        bucket_name: str | None = None,
+        prefix: str | None = None,
+        delimiter: str | None = None,
+        page_size: int | None = None,
+        max_items: int | None = None,
     ) -> list:
         """
         Lists prefixes in a bucket under prefix
@@ -315,7 +315,7 @@ class S3Hook(AwsBaseHook):
         return prefixes
 
     def _list_key_object_filter(
-        self, keys: list, from_datetime: Optional[datetime] = None, to_datetime: Optional[datetime] = None
+        self, keys: list, from_datetime: datetime | None = None, to_datetime: datetime | None = None
     ) -> list:
         def _is_in_period(input_date: datetime) -> bool:
             if from_datetime is not None and input_date <= from_datetime:
@@ -329,15 +329,15 @@ class S3Hook(AwsBaseHook):
     @provide_bucket_name
     def list_keys(
         self,
-        bucket_name: Optional[str] = None,
-        prefix: Optional[str] = None,
-        delimiter: Optional[str] = None,
-        page_size: Optional[int] = None,
-        max_items: Optional[int] = None,
-        start_after_key: Optional[str] = None,
-        from_datetime: Optional[datetime] = None,
-        to_datetime: Optional[datetime] = None,
-        object_filter: Optional[Callable[..., list]] = None,
+        bucket_name: str | None = None,
+        prefix: str | None = None,
+        delimiter: str | None = None,
+        page_size: int | None = None,
+        max_items: int | None = None,
+        start_after_key: str | None = None,
+        from_datetime: datetime | None = None,
+        to_datetime: datetime | None = None,
+        object_filter: Callable[..., list] | None = None,
     ) -> list:
         """
         Lists keys in a bucket under prefix and not containing delimiter
@@ -408,10 +408,10 @@ class S3Hook(AwsBaseHook):
     def get_file_metadata(
         self,
         prefix: str,
-        bucket_name: Optional[str] = None,
-        page_size: Optional[int] = None,
-        max_items: Optional[int] = None,
-    ) -> List:
+        bucket_name: str | None = None,
+        page_size: int | None = None,
+        max_items: int | None = None,
+    ) -> list:
         """
         Lists metadata objects in a bucket under prefix
 
@@ -438,7 +438,7 @@ class S3Hook(AwsBaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def head_object(self, key: str, bucket_name: Optional[str] = None) -> Optional[dict]:
+    def head_object(self, key: str, bucket_name: str | None = None) -> dict | None:
         """
         Retrieves metadata of an object
 
@@ -457,7 +457,7 @@ class S3Hook(AwsBaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def check_for_key(self, key: str, bucket_name: Optional[str] = None) -> bool:
+    def check_for_key(self, key: str, bucket_name: str | None = None) -> bool:
         """
         Checks if a key exists in a bucket
 
@@ -471,7 +471,7 @@ class S3Hook(AwsBaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def get_key(self, key: str, bucket_name: Optional[str] = None) -> S3Transfer:
+    def get_key(self, key: str, bucket_name: str | None = None) -> S3Transfer:
         """
         Returns a boto3.s3.Object
 
@@ -492,7 +492,7 @@ class S3Hook(AwsBaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def read_key(self, key: str, bucket_name: Optional[str] = None) -> str:
+    def read_key(self, key: str, bucket_name: str | None = None) -> str:
         """
         Reads a key from S3
 
@@ -509,11 +509,11 @@ class S3Hook(AwsBaseHook):
     def select_key(
         self,
         key: str,
-        bucket_name: Optional[str] = None,
-        expression: Optional[str] = None,
-        expression_type: Optional[str] = None,
-        input_serialization: Optional[Dict[str, Any]] = None,
-        output_serialization: Optional[Dict[str, Any]] = None,
+        bucket_name: str | None = None,
+        expression: str | None = None,
+        expression_type: str | None = None,
+        input_serialization: dict[str, Any] | None = None,
+        output_serialization: dict[str, Any] | None = None,
     ) -> str:
         """
         Reads a key with S3 Select.
@@ -555,7 +555,7 @@ class S3Hook(AwsBaseHook):
     @provide_bucket_name
     @unify_bucket_name_and_key
     def check_for_wildcard_key(
-        self, wildcard_key: str, bucket_name: Optional[str] = None, delimiter: str = ''
+        self, wildcard_key: str, bucket_name: str | None = None, delimiter: str = ''
     ) -> bool:
         """
         Checks that a key matching a wildcard expression exists in a bucket
@@ -574,7 +574,7 @@ class S3Hook(AwsBaseHook):
     @provide_bucket_name
     @unify_bucket_name_and_key
     def get_wildcard_key(
-        self, wildcard_key: str, bucket_name: Optional[str] = None, delimiter: str = ''
+        self, wildcard_key: str, bucket_name: str | None = None, delimiter: str = ''
     ) -> S3Transfer:
         """
         Returns a boto3.s3.Object object matching the wildcard expression
@@ -596,13 +596,13 @@ class S3Hook(AwsBaseHook):
     @unify_bucket_name_and_key
     def load_file(
         self,
-        filename: Union[Path, str],
+        filename: Path | str,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         replace: bool = False,
         encrypt: bool = False,
         gzip: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
     ) -> None:
         """
         Loads a local file to S3
@@ -644,12 +644,12 @@ class S3Hook(AwsBaseHook):
         self,
         string_data: str,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         replace: bool = False,
         encrypt: bool = False,
-        encoding: Optional[str] = None,
-        acl_policy: Optional[str] = None,
-        compression: Optional[str] = None,
+        encoding: str | None = None,
+        acl_policy: str | None = None,
+        compression: str | None = None,
     ) -> None:
         """
         Loads a string to S3
@@ -694,10 +694,10 @@ class S3Hook(AwsBaseHook):
         self,
         bytes_data: bytes,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         replace: bool = False,
         encrypt: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
     ) -> None:
         """
         Loads bytes to S3
@@ -725,10 +725,10 @@ class S3Hook(AwsBaseHook):
         self,
         file_obj: BytesIO,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         replace: bool = False,
         encrypt: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
     ) -> None:
         """
         Loads a file object to S3
@@ -749,10 +749,10 @@ class S3Hook(AwsBaseHook):
         self,
         file_obj: BytesIO,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         replace: bool = False,
         encrypt: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
     ) -> None:
         if not replace and self.check_for_key(key, bucket_name):
             raise ValueError(f"The key {key} already exists.")
@@ -776,10 +776,10 @@ class S3Hook(AwsBaseHook):
         self,
         source_bucket_key: str,
         dest_bucket_key: str,
-        source_bucket_name: Optional[str] = None,
-        dest_bucket_name: Optional[str] = None,
-        source_version_id: Optional[str] = None,
-        acl_policy: Optional[str] = None,
+        source_bucket_name: str | None = None,
+        dest_bucket_name: str | None = None,
+        source_version_id: str | None = None,
+        acl_policy: str | None = None,
     ) -> None:
         """
         Creates a copy of an object that is already stored in S3.
@@ -838,7 +838,7 @@ class S3Hook(AwsBaseHook):
                 self.delete_objects(bucket=bucket_name, keys=bucket_keys)
         self.conn.delete_bucket(Bucket=bucket_name)
 
-    def delete_objects(self, bucket: str, keys: Union[str, list]) -> None:
+    def delete_objects(self, bucket: str, keys: str | list) -> None:
         """
         Delete keys from the bucket.
 
@@ -869,9 +869,7 @@ class S3Hook(AwsBaseHook):
 
     @provide_bucket_name
     @unify_bucket_name_and_key
-    def download_file(
-        self, key: str, bucket_name: Optional[str] = None, local_path: Optional[str] = None
-    ) -> str:
+    def download_file(self, key: str, bucket_name: str | None = None, local_path: str | None = None) -> str:
         """
         Downloads a file from the S3 location to the local file system.
 
@@ -906,10 +904,10 @@ class S3Hook(AwsBaseHook):
     def generate_presigned_url(
         self,
         client_method: str,
-        params: Optional[dict] = None,
+        params: dict | None = None,
         expires_in: int = 3600,
-        http_method: Optional[str] = None,
-    ) -> Optional[str]:
+        http_method: str | None = None,
+    ) -> str | None:
         """
         Generate a presigned url given a client, its method, and arguments
 
@@ -933,7 +931,7 @@ class S3Hook(AwsBaseHook):
             return None
 
     @provide_bucket_name
-    def get_bucket_tagging(self, bucket_name: Optional[str] = None) -> Optional[List[Dict[str, str]]]:
+    def get_bucket_tagging(self, bucket_name: str | None = None) -> list[dict[str, str]] | None:
         """
         Gets a List of tags from a bucket.
 
@@ -953,10 +951,10 @@ class S3Hook(AwsBaseHook):
     @provide_bucket_name
     def put_bucket_tagging(
         self,
-        tag_set: Optional[List[Dict[str, str]]] = None,
-        key: Optional[str] = None,
-        value: Optional[str] = None,
-        bucket_name: Optional[str] = None,
+        tag_set: list[dict[str, str]] | None = None,
+        key: str | None = None,
+        value: str | None = None,
+        bucket_name: str | None = None,
     ) -> None:
         """
         Overwrites the existing TagSet with provided tags.  Must provide either a TagSet or a key/value pair.
@@ -986,7 +984,7 @@ class S3Hook(AwsBaseHook):
             raise e
 
     @provide_bucket_name
-    def delete_bucket_tagging(self, bucket_name: Optional[str] = None) -> None:
+    def delete_bucket_tagging(self, bucket_name: str | None = None) -> None:
         """
         Deletes all tags from a bucket.
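
parse_s3_url above is a staticmethod returning a (bucket, key) tuple, so it can be called without instantiating the hook. For instance:

    from airflow.providers.amazon.aws.hooks.s3 import S3Hook

    bucket, key = S3Hook.parse_s3_url("s3://example-bucket/data/2022/file.csv")
    # bucket == "example-bucket", key == "data/2022/file.csv"
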
 
diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py
index f5f883eb23..851a96c3f8 100644
--- a/airflow/providers/amazon/aws/hooks/sagemaker.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import collections
 import os
 import tarfile
@@ -22,7 +24,7 @@ import tempfile
 import time
 from datetime import datetime
 from functools import partial
-from typing import Any, Callable, Dict, Generator, List, Optional, Set, Tuple, cast
+from typing import Any, Callable, Generator, cast
 
 from botocore.exceptions import ClientError
 
@@ -51,7 +53,7 @@ class LogState:
 Position = collections.namedtuple('Position', ['timestamp', 'skip'])
 
 
-def argmin(arr, f: Callable) -> Optional[int]:
+def argmin(arr, f: Callable) -> int | None:
     """Return the index, i, in arr that minimizes f(arr[i])"""
     min_value = None
     min_idx = None
@@ -93,7 +95,7 @@ def secondary_training_status_changed(current_job_description: dict, prev_job_de
 
 
 def secondary_training_status_message(
-    job_description: Dict[str, List[Any]], prev_description: Optional[dict]
+    job_description: dict[str, list[Any]], prev_description: dict | None
 ) -> str:
     """
     Returns a string that contains the start time and the secondary training job status message.
@@ -252,7 +254,7 @@ class SageMakerHook(AwsBaseHook):
             self.logs_hook.get_log_events(log_group, s, positions[s].timestamp, positions[s].skip)
             for s in streams
         ]
-        events: List[Optional[Any]] = []
+        events: list[Any | None] = []
         for event_stream in event_iters:
             if not event_stream:
                 events.append(None)
@@ -276,7 +278,7 @@ class SageMakerHook(AwsBaseHook):
         wait_for_completion: bool = True,
         print_log: bool = True,
         check_interval: int = 30,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         Starts a model training job. After training completes, Amazon SageMaker saves
@@ -324,7 +326,7 @@ class SageMakerHook(AwsBaseHook):
         config: dict,
         wait_for_completion: bool = True,
         check_interval: int = 30,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         Starts a hyperparameter tuning job. A hyperparameter tuning job finds the
@@ -360,7 +362,7 @@ class SageMakerHook(AwsBaseHook):
         config: dict,
         wait_for_completion: bool = True,
         check_interval: int = 30,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         Starts a transform job. A transform job uses a trained model to get inferences
@@ -394,7 +396,7 @@ class SageMakerHook(AwsBaseHook):
         config: dict,
         wait_for_completion: bool = True,
         check_interval: int = 30,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         Use Amazon SageMaker Processing to analyze data and evaluate machine learning
@@ -455,7 +457,7 @@ class SageMakerHook(AwsBaseHook):
         config: dict,
         wait_for_completion: bool = True,
         check_interval: int = 30,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         When you create a serverless endpoint, SageMaker provisions and manages
@@ -494,7 +496,7 @@ class SageMakerHook(AwsBaseHook):
         config: dict,
         wait_for_completion: bool = True,
         check_interval: int = 30,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         Deploys the new EndpointConfig specified in the request, switches to using
@@ -650,8 +652,8 @@ class SageMakerHook(AwsBaseHook):
         key: str,
         describe_function: Callable,
         check_interval: int,
-        max_ingestion_time: Optional[int] = None,
-        non_terminal_states: Optional[Set] = None,
+        max_ingestion_time: int | None = None,
+        non_terminal_states: set | None = None,
     ):
         """
         Check status of a SageMaker job
@@ -710,7 +712,7 @@ class SageMakerHook(AwsBaseHook):
         failed_states: set,
         wait_for_completion: bool,
         check_interval: int,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
     ):
         """
         Display the logs for a given training job, optionally tailing them until the
@@ -794,8 +796,8 @@ class SageMakerHook(AwsBaseHook):
             self.log.info('Billable seconds: %d', int(billable_time.total_seconds()) + 1)
 
     def list_training_jobs(
-        self, name_contains: Optional[str] = None, max_results: Optional[int] = None, **kwargs
-    ) -> List[Dict]:
+        self, name_contains: str | None = None, max_results: int | None = None, **kwargs
+    ) -> list[dict]:
         """
         This method wraps boto3's `list_training_jobs`. The training job name and max results are configurable
         via arguments. Other arguments are not, and should be provided via kwargs. Note boto3 expects these in
@@ -821,8 +823,8 @@ class SageMakerHook(AwsBaseHook):
         return results
 
     def list_transform_jobs(
-        self, name_contains: Optional[str] = None, max_results: Optional[int] = None, **kwargs
-    ) -> List[Dict]:
+        self, name_contains: str | None = None, max_results: int | None = None, **kwargs
+    ) -> list[dict]:
         """
         This method wraps boto3's `list_transform_jobs`.
         The transform job name and max results are configurable via arguments.
@@ -848,7 +850,7 @@ class SageMakerHook(AwsBaseHook):
         )
         return results
 
-    def list_processing_jobs(self, **kwargs) -> List[Dict]:
+    def list_processing_jobs(self, **kwargs) -> list[dict]:
         """
         This method wraps boto3's `list_processing_jobs`. All arguments should be provided via kwargs.
         Note boto3 expects these in CamelCase format, for example:
@@ -870,8 +872,8 @@ class SageMakerHook(AwsBaseHook):
         return results
 
     def _preprocess_list_request_args(
-        self, name_contains: Optional[str] = None, max_results: Optional[int] = None, **kwargs
-    ) -> Tuple[Dict[str, Any], Optional[int]]:
+        self, name_contains: str | None = None, max_results: int | None = None, **kwargs
+    ) -> tuple[dict[str, Any], int | None]:
         """
         This method preprocesses the arguments to the boto3's list_* methods.
         It will turn arguments name_contains and max_results as boto3 compliant CamelCase format.
@@ -901,8 +903,8 @@ class SageMakerHook(AwsBaseHook):
         return config, max_results
 
     def _list_request(
-        self, partial_func: Callable, result_key: str, max_results: Optional[int] = None
-    ) -> List[Dict]:
+        self, partial_func: Callable, result_key: str, max_results: int | None = None
+    ) -> list[dict]:
         """
         All AWS boto3 list_* requests return results in batches (if the key "NextToken" is contained in the
         result, there are more results to fetch). The default AWS batch size is 10, and configurable up to
@@ -919,7 +921,7 @@ class SageMakerHook(AwsBaseHook):
         """
         sagemaker_max_results = 100  # Fixed number set by AWS
 
-        results: List[Dict] = []
+        results: list[dict] = []
         next_token = None
 
         while True:
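
_list_request wraps the standard boto3 NextToken pagination its docstring describes: keep calling the partial function, extending the result list, until no NextToken is returned or max_results is reached. A generic sketch of that loop, not the hook's exact code:

    def list_all(partial_func, result_key, max_results=None):
        results = []
        next_token = None
        while True:
            kwargs = {"NextToken": next_token} if next_token else {}
            response = partial_func(**kwargs)
            results.extend(response.get(result_key, []))
            next_token = response.get("NextToken")
            if next_token is None or (max_results is not None and len(results) >= max_results):
                return results if max_results is None else results[:max_results]
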
diff --git a/airflow/providers/amazon/aws/hooks/secrets_manager.py b/airflow/providers/amazon/aws/hooks/secrets_manager.py
index f1596d4961..9c4586f436 100644
--- a/airflow/providers/amazon/aws/hooks/secrets_manager.py
+++ b/airflow/providers/amazon/aws/hooks/secrets_manager.py
@@ -15,11 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
 
 import base64
 import json
-from typing import Union
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -38,7 +37,7 @@ class SecretsManagerHook(AwsBaseHook):
     def __init__(self, *args, **kwargs):
         super().__init__(client_type='secretsmanager', *args, **kwargs)
 
-    def get_secret(self, secret_name: str) -> Union[str, bytes]:
+    def get_secret(self, secret_name: str) -> str | bytes:
         """
         Retrieve secret value from AWS Secrets Manager as a str or bytes,
         reflecting the format in which it is stored in AWS Secrets Manager
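
get_secret returns either str or bytes depending on how the secret was stored (string vs. binary). A short usage sketch; the secret name is a placeholder:

    from airflow.providers.amazon.aws.hooks.secrets_manager import SecretsManagerHook

    hook = SecretsManagerHook(aws_conn_id="aws_default")
    value = hook.get_secret(secret_name="example/connection")  # str for string secrets, bytes for binary ones
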
diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/airflow/providers/amazon/aws/hooks/ses.py
index 7780331558..98ba4c81d1 100644
--- a/airflow/providers/amazon/aws/hooks/ses.py
+++ b/airflow/providers/amazon/aws/hooks/ses.py
@@ -15,7 +15,9 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains AWS SES Hook"""
-from typing import Any, Dict, Iterable, List, Optional, Union
+from __future__ import annotations
+
+from typing import Any, Iterable
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.utils.email import build_mime_message
@@ -39,17 +41,17 @@ class SesHook(AwsBaseHook):
     def send_email(
         self,
         mail_from: str,
-        to: Union[str, Iterable[str]],
+        to: str | Iterable[str],
         subject: str,
         html_content: str,
-        files: Optional[List[str]] = None,
-        cc: Optional[Union[str, Iterable[str]]] = None,
-        bcc: Optional[Union[str, Iterable[str]]] = None,
+        files: list[str] | None = None,
+        cc: str | Iterable[str] | None = None,
+        bcc: str | Iterable[str] | None = None,
         mime_subtype: str = 'mixed',
         mime_charset: str = 'utf-8',
-        reply_to: Optional[str] = None,
-        return_path: Optional[str] = None,
-        custom_headers: Optional[Dict[str, Any]] = None,
+        reply_to: str | None = None,
+        return_path: str | None = None,
+        custom_headers: dict[str, Any] | None = None,
     ) -> dict:
         """
         Send email using Amazon Simple Email Service
diff --git a/airflow/providers/amazon/aws/hooks/sns.py b/airflow/providers/amazon/aws/hooks/sns.py
index 30750ea1c5..59d8f639ce 100644
--- a/airflow/providers/amazon/aws/hooks/sns.py
+++ b/airflow/providers/amazon/aws/hooks/sns.py
@@ -15,10 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains AWS SNS hook"""
+from __future__ import annotations
+
 import json
-from typing import Dict, Optional, Union
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -55,8 +55,8 @@ class SnsHook(AwsBaseHook):
         self,
         target_arn: str,
         message: str,
-        subject: Optional[str] = None,
-        message_attributes: Optional[dict] = None,
+        subject: str | None = None,
+        message_attributes: dict | None = None,
     ):
         """
         Publish a message to a topic or an endpoint.
@@ -74,7 +74,7 @@ class SnsHook(AwsBaseHook):
             - iterable = String.Array
 
         """
-        publish_kwargs: Dict[str, Union[str, dict]] = {
+        publish_kwargs: dict[str, str | dict] = {
             'TargetArn': target_arn,
             'MessageStructure': 'json',
             'Message': json.dumps({'default': message}),
diff --git a/airflow/providers/amazon/aws/hooks/sqs.py b/airflow/providers/amazon/aws/hooks/sqs.py
index bb1002a6cc..ec8651c3e8 100644
--- a/airflow/providers/amazon/aws/hooks/sqs.py
+++ b/airflow/providers/amazon/aws/hooks/sqs.py
@@ -15,9 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains AWS SQS hook"""
-from typing import Dict, Optional
+from __future__ import annotations
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -37,7 +36,7 @@ class SqsHook(AwsBaseHook):
         kwargs["client_type"] = "sqs"
         super().__init__(*args, **kwargs)
 
-    def create_queue(self, queue_name: str, attributes: Optional[Dict] = None) -> Dict:
+    def create_queue(self, queue_name: str, attributes: dict | None = None) -> dict:
         """
         Create queue using connection object
 
@@ -56,9 +55,9 @@ class SqsHook(AwsBaseHook):
         queue_url: str,
         message_body: str,
         delay_seconds: int = 0,
-        message_attributes: Optional[Dict] = None,
-        message_group_id: Optional[str] = None,
-    ) -> Dict:
+        message_attributes: dict | None = None,
+        message_group_id: str | None = None,
+    ) -> dict:
         """
         Send message to the queue
 
diff --git a/airflow/providers/amazon/aws/hooks/step_function.py b/airflow/providers/amazon/aws/hooks/step_function.py
index 0e0cb1ced7..f0223e74c5 100644
--- a/airflow/providers/amazon/aws/hooks/step_function.py
+++ b/airflow/providers/amazon/aws/hooks/step_function.py
@@ -14,9 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import json
-from typing import Optional, Union
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
@@ -39,8 +39,8 @@ class StepFunctionHook(AwsBaseHook):
     def start_execution(
         self,
         state_machine_arn: str,
-        name: Optional[str] = None,
-        state_machine_input: Union[dict, str, None] = None,
+        name: str | None = None,
+        state_machine_input: dict | str | None = None,
     ) -> str:
         """
         Start Execution of the State Machine.
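
start_execution accepts the state machine input either as an already-serialized string or as a dict (json is imported above, so serializing the dict inside the hook is the natural reading) and returns a string identifying the execution. A hedged usage sketch; the ARN is a placeholder:

    from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook

    hook = StepFunctionHook(aws_conn_id="aws_default")
    execution_arn = hook.start_execution(
        state_machine_arn="arn:aws:states:us-east-1:123456789012:stateMachine:example",  # placeholder ARN
        state_machine_input={"key": "value"},
    )
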
diff --git a/airflow/providers/amazon/aws/hooks/sts.py b/airflow/providers/amazon/aws/hooks/sts.py
index 78ecad74d9..d1f92e215d 100644
--- a/airflow/providers/amazon/aws/hooks/sts.py
+++ b/airflow/providers/amazon/aws/hooks/sts.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 
diff --git a/airflow/providers/amazon/aws/links/base_aws.py b/airflow/providers/amazon/aws/links/base_aws.py
index 864567ceb7..973b746eeb 100644
--- a/airflow/providers/amazon/aws/links/base_aws.py
+++ b/airflow/providers/amazon/aws/links/base_aws.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from datetime import datetime
-from typing import TYPE_CHECKING, ClassVar, Optional
+from typing import TYPE_CHECKING, ClassVar
 
 from airflow.models import BaseOperatorLink, XCom
 
@@ -38,7 +39,7 @@ class BaseAwsLink(BaseOperatorLink):
     format_str: ClassVar[str]
 
     @staticmethod
-    def get_aws_domain(aws_partition) -> Optional[str]:
+    def get_aws_domain(aws_partition) -> str | None:
         if aws_partition == "aws":
             return "aws.amazon.com"
         elif aws_partition == "aws-cn":
@@ -63,8 +64,8 @@ class BaseAwsLink(BaseOperatorLink):
     def get_link(
         self,
         operator,
-        dttm: Optional[datetime] = None,
-        ti_key: Optional["TaskInstanceKey"] = None,
+        dttm: datetime | None = None,
+        ti_key: TaskInstanceKey | None = None,
     ) -> str:
         """
         Link to Amazon Web Services Console.
@@ -90,7 +91,7 @@ class BaseAwsLink(BaseOperatorLink):
 
     @classmethod
     def persist(
-        cls, context: "Context", operator: "BaseOperator", region_name: str, aws_partition: str, **kwargs
+        cls, context: Context, operator: BaseOperator, region_name: str, aws_partition: str, **kwargs
     ) -> None:
         """Store link information into XCom"""
         if not operator.do_xcom_push:
diff --git a/airflow/providers/amazon/aws/links/batch.py b/airflow/providers/amazon/aws/links/batch.py
index d4be3fae48..432d129a7c 100644
--- a/airflow/providers/amazon/aws/links/batch.py
+++ b/airflow/providers/amazon/aws/links/batch.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.base_aws import BASE_AWS_CONSOLE_LINK, BaseAwsLink
 
diff --git a/airflow/providers/amazon/aws/links/emr.py b/airflow/providers/amazon/aws/links/emr.py
index ea46341dd7..aa739567fb 100644
--- a/airflow/providers/amazon/aws/links/emr.py
+++ b/airflow/providers/amazon/aws/links/emr.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from airflow.providers.amazon.aws.links.base_aws import BASE_AWS_CONSOLE_LINK, BaseAwsLink
 
diff --git a/airflow/providers/amazon/aws/links/logs.py b/airflow/providers/amazon/aws/links/logs.py
index beb946684d..7998191d92 100644
--- a/airflow/providers/amazon/aws/links/logs.py
+++ b/airflow/providers/amazon/aws/links/logs.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from urllib.parse import quote_plus
 
diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
index c13ebd87e8..f2b25327aa 100644
--- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
+++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -15,8 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from datetime import datetime
-from typing import Optional
 
 import watchtower
 
@@ -38,7 +39,7 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
     :param filename_template: template for file name (local storage) or log stream name (remote)
     """
 
-    def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: Optional[str] = None):
+    def __init__(self, base_log_folder: str, log_group_arn: str, filename_template: str | None = None):
         super().__init__(base_log_folder, filename_template)
         split_arn = log_group_arn.split(':')
 
diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py
index ae12b2ca2f..14ea3ba7a0 100644
--- a/airflow/providers/amazon/aws/log/s3_task_handler.py
+++ b/airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import os
 import pathlib
-from typing import Optional
 
 from airflow.compat.functools import cached_property
 from airflow.configuration import conf
@@ -32,7 +33,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
     uploads to and reads from S3 remote storage.
     """
 
-    def __init__(self, base_log_folder: str, s3_log_folder: str, filename_template: Optional[str] = None):
+    def __init__(self, base_log_folder: str, s3_log_folder: str, filename_template: str | None = None):
         super().__init__(base_log_folder, filename_template)
         self.remote_base = s3_log_folder
         self.log_relative_path = ''
diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/airflow/providers/amazon/aws/operators/appflow.py
index a63ec8ccdb..c7e16387ed 100644
--- a/airflow/providers/amazon/aws/operators/appflow.py
+++ b/airflow/providers/amazon/aws/operators/appflow.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, List, Optional, cast
+from typing import TYPE_CHECKING, cast
 
 from airflow.compat.functools import cached_property
 from airflow.exceptions import AirflowException
@@ -61,11 +62,11 @@ class AppflowBaseOperator(BaseOperator):
         source: str,
         flow_name: str,
         flow_update: bool,
-        source_field: Optional[str] = None,
-        filter_date: Optional[str] = None,
+        source_field: str | None = None,
+        filter_date: str | None = None,
         poll_interval: int = 20,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -85,8 +86,8 @@ class AppflowBaseOperator(BaseOperator):
         """Create and return an AppflowHook."""
         return AppflowHook(aws_conn_id=self.aws_conn_id, region_name=self.region)
 
-    def execute(self, context: "Context") -> None:
-        self.filter_date_parsed: Optional[datetime] = (
+    def execute(self, context: Context) -> None:
+        self.filter_date_parsed: datetime | None = (
             datetime.fromisoformat(self.filter_date) if self.filter_date else None
         )
         self.connector_type = self._get_connector_type()
@@ -132,7 +133,7 @@ class AppflowRunOperator(AppflowBaseOperator):
         flow_name: str,
         poll_interval: int = 20,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         if source not in {"salesforce", "zendesk"}:
@@ -171,7 +172,7 @@ class AppflowRunFullOperator(AppflowBaseOperator):
         flow_name: str,
         poll_interval: int = 20,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         if source not in {"salesforce", "zendesk"}:
@@ -216,7 +217,7 @@ class AppflowRunBeforeOperator(AppflowBaseOperator):
         filter_date: str,
         poll_interval: int = 20,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         if not filter_date:
@@ -242,7 +243,7 @@ class AppflowRunBeforeOperator(AppflowBaseOperator):
             raise ValueError(f"Invalid filter_date argument parser value: {self.filter_date_parsed}")
         if not self.source_field:
             raise ValueError(f"Invalid source_field argument value: {self.source_field}")
-        filter_task: "TaskTypeDef" = {
+        filter_task: TaskTypeDef = {
             "taskType": "Filter",
             "connectorOperator": {self.connector_type: "LESS_THAN"},  # type: ignore
             "sourceFields": [self.source_field],
@@ -283,7 +284,7 @@ class AppflowRunAfterOperator(AppflowBaseOperator):
         filter_date: str,
         poll_interval: int = 20,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         if not filter_date:
@@ -307,7 +308,7 @@ class AppflowRunAfterOperator(AppflowBaseOperator):
             raise ValueError(f"Invalid filter_date argument parser value: {self.filter_date_parsed}")
         if not self.source_field:
             raise ValueError(f"Invalid source_field argument value: {self.source_field}")
-        filter_task: "TaskTypeDef" = {
+        filter_task: TaskTypeDef = {
             "taskType": "Filter",
             "connectorOperator": {self.connector_type: "GREATER_THAN"},  # type: ignore
             "sourceFields": [self.source_field],
@@ -348,7 +349,7 @@ class AppflowRunDailyOperator(AppflowBaseOperator):
         filter_date: str,
         poll_interval: int = 20,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         if not filter_date:
@@ -374,7 +375,7 @@ class AppflowRunDailyOperator(AppflowBaseOperator):
             raise ValueError(f"Invalid source_field argument value: {self.source_field}")
         start_filter_date = self.filter_date_parsed - timedelta(milliseconds=1)
         end_filter_date = self.filter_date_parsed + timedelta(days=1)
-        filter_task: "TaskTypeDef" = {
+        filter_task: TaskTypeDef = {
             "taskType": "Filter",
             "connectorOperator": {self.connector_type: "BETWEEN"},  # type: ignore
             "sourceFields": [self.source_field],
@@ -413,7 +414,7 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator):
         appflow_run_task_id: str,
         ignore_downstream_trigger_rules: bool = True,
         aws_conn_id: str = "aws_default",
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         if get_airflow_version() >= (2, 3):
@@ -436,8 +437,8 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator):
 
     @staticmethod
     def _get_target_execution_id(
-        records: List["ExecutionRecordTypeDef"], execution_id: str
-    ) -> Optional["ExecutionRecordTypeDef"]:
+        records: list[ExecutionRecordTypeDef], execution_id: str
+    ) -> ExecutionRecordTypeDef | None:
         for record in records:
             if record.get("executionId") == execution_id:
                 return record
@@ -460,7 +461,7 @@ class AppflowRecordsShortCircuitOperator(ShortCircuitOperator):
             raise AirflowException(f"No execution_id found from task_id {appflow_task_id}!")
         self.log.info("execution_id: %s", execution_id)
         args = {"flowName": flow_name, "maxResults": 100}
-        response: "DescribeFlowExecutionRecordsResponseTypeDef" = cast(
+        response: DescribeFlowExecutionRecordsResponseTypeDef = cast(
             "DescribeFlowExecutionRecordsResponseTypeDef", {}
         )
         record = None
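
    The appflow hunks also show the second half of the rule: names imported only under
    `if TYPE_CHECKING:` (such as `TaskTypeDef`) may now be written unquoted in annotations,
    while anything evaluated at runtime, like the first argument to `typing.cast`, keeps its
    string form. A hedged, self-contained sketch of that split (the stub import path follows
    the usual mypy_boto3 layout and is only an illustration, not the operator's code):

        from __future__ import annotations

        from typing import TYPE_CHECKING, cast

        if TYPE_CHECKING:
            # Never imported at runtime; only the type checker sees this.
            from mypy_boto3_appflow.type_defs import TaskTypeDef


        def build_filter(source_field: str, connector_type: str) -> TaskTypeDef:
            # The return annotation above is unquoted: under PEP 563 it is just a string.
            filter_task: TaskTypeDef = {
                "taskType": "Filter",
                "connectorOperator": {connector_type: "LESS_THAN"},
                "sourceFields": [source_field],
            }
            # cast() executes at runtime, so its type argument stays quoted.
            return cast("TaskTypeDef", filter_task)
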
diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py
index 6fd01324fc..bed75e224c 100644
--- a/airflow/providers/amazon/aws/operators/athena.py
+++ b/airflow/providers/amazon/aws/operators/athena.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
+
 import warnings
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence
+from typing import TYPE_CHECKING, Any, Optional, Sequence
 
 from airflow.compat.functools import cached_property
 from airflow.models import BaseOperator
@@ -61,13 +62,13 @@ class AthenaOperator(BaseOperator):
         database: str,
         output_location: str,
         aws_conn_id: str = "aws_default",
-        client_request_token: Optional[str] = None,
+        client_request_token: str | None = None,
         workgroup: str = "primary",
-        query_execution_context: Optional[Dict[str, str]] = None,
-        result_configuration: Optional[Dict[str, Any]] = None,
+        query_execution_context: dict[str, str] | None = None,
+        result_configuration: dict[str, Any] | None = None,
         sleep_time: int = 30,
-        max_tries: Optional[int] = None,
-        max_polling_attempts: Optional[int] = None,
+        max_tries: int | None = None,
+        max_polling_attempts: int | None = None,
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
@@ -100,7 +101,7 @@ class AthenaOperator(BaseOperator):
         """Create and return an AthenaHook."""
         return AthenaHook(self.aws_conn_id, sleep_time=self.sleep_time)
 
-    def execute(self, context: 'Context') -> Optional[str]:
+    def execute(self, context: Context) -> str | None:
         """Run Presto Query on Athena"""
         self.query_execution_context['Database'] = self.database
         self.result_configuration['OutputLocation'] = self.output_location
diff --git a/airflow/providers/amazon/aws/operators/aws_lambda.py b/airflow/providers/amazon/aws/operators/aws_lambda.py
index f7c6ab710c..70d96c094d 100644
--- a/airflow/providers/amazon/aws/operators/aws_lambda.py
+++ b/airflow/providers/amazon/aws/operators/aws_lambda.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module is deprecated. Please use :mod:`airflow.providers.amazon.aws.operators.lambda_function`."""
+from __future__ import annotations
 
 import warnings
 
diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py
index 567c419715..42775e8efd 100644
--- a/airflow/providers/amazon/aws/operators/batch.py
+++ b/airflow/providers/amazon/aws/operators/batch.py
@@ -14,8 +14,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
-
 """
 An Airflow operator for AWS Batch services
 
@@ -25,8 +23,10 @@ An Airflow operator for AWS Batch services
     - http://boto3.readthedocs.io/en/latest/reference/services/batch.html
     - https://docs.aws.amazon.com/batch/latest/APIReference/Welcome.html
 """
+from __future__ import annotations
+
 import sys
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence
+from typing import TYPE_CHECKING, Any, Optional, Sequence
 
 from airflow.providers.amazon.aws.utils import trim_none_values
 
@@ -128,15 +128,15 @@ class BatchOperator(BaseOperator):
         job_definition: str,
         job_queue: str,
         overrides: dict,
-        array_properties: Optional[dict] = None,
-        parameters: Optional[dict] = None,
-        job_id: Optional[str] = None,
-        waiters: Optional[Any] = None,
-        max_retries: Optional[int] = None,
-        status_retries: Optional[int] = None,
-        aws_conn_id: Optional[str] = None,
-        region_name: Optional[str] = None,
-        tags: Optional[dict] = None,
+        array_properties: dict | None = None,
+        parameters: dict | None = None,
+        job_id: str | None = None,
+        waiters: Any | None = None,
+        max_retries: int | None = None,
+        status_retries: int | None = None,
+        aws_conn_id: str | None = None,
+        region_name: str | None = None,
+        tags: dict | None = None,
         wait_for_completion: bool = True,
         **kwargs,
     ):
@@ -159,7 +159,7 @@ class BatchOperator(BaseOperator):
             region_name=region_name,
         )
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Submit and monitor an AWS Batch job
 
@@ -176,7 +176,7 @@ class BatchOperator(BaseOperator):
         response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user")
         self.log.info("AWS Batch job (%s) terminated: %s", self.job_id, response)
 
-    def submit_job(self, context: 'Context'):
+    def submit_job(self, context: Context):
         """
         Submit an AWS Batch job
 
@@ -217,7 +217,7 @@ class BatchOperator(BaseOperator):
             job_id=self.job_id,
         )
 
-    def monitor_job(self, context: 'Context'):
+    def monitor_job(self, context: Context):
         """
         Monitor an AWS Batch job
         monitor_job can raise an exception or an AirflowTaskTimeout can be raised if execution_timeout
@@ -330,13 +330,13 @@ class BatchCreateComputeEnvironmentOperator(BaseOperator):
         environment_type: str,
         state: str,
         compute_resources: dict,
-        unmanaged_v_cpus: Optional[int] = None,
-        service_role: Optional[str] = None,
-        tags: Optional[dict] = None,
-        max_retries: Optional[int] = None,
-        status_retries: Optional[int] = None,
-        aws_conn_id: Optional[str] = None,
-        region_name: Optional[str] = None,
+        unmanaged_v_cpus: int | None = None,
+        service_role: str | None = None,
+        tags: dict | None = None,
+        max_retries: int | None = None,
+        status_retries: int | None = None,
+        aws_conn_id: str | None = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -362,9 +362,9 @@ class BatchCreateComputeEnvironmentOperator(BaseOperator):
             region_name=self.region_name,
         )
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """Create an AWS batch compute environment"""
-        kwargs: Dict[str, Any] = {
+        kwargs: dict[str, Any] = {
             'computeEnvironmentName': self.compute_environment_name,
             'type': self.environment_type,
             'state': self.state,
diff --git a/airflow/providers/amazon/aws/operators/cloud_formation.py b/airflow/providers/amazon/aws/operators/cloud_formation.py
index 8583b1ca1a..19ed747163 100644
--- a/airflow/providers/amazon/aws/operators/cloud_formation.py
+++ b/airflow/providers/amazon/aws/operators/cloud_formation.py
@@ -16,7 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains CloudFormation create/delete stack operators."""
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.cloud_formation import CloudFormationHook
@@ -51,7 +53,7 @@ class CloudFormationCreateStackOperator(BaseOperator):
         self.cloudformation_parameters = cloudformation_parameters
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.log.info('CloudFormation parameters: %s', self.cloudformation_parameters)
 
         cloudformation_hook = CloudFormationHook(aws_conn_id=self.aws_conn_id)
@@ -81,7 +83,7 @@ class CloudFormationDeleteStackOperator(BaseOperator):
         self,
         *,
         stack_name: str,
-        cloudformation_parameters: Optional[dict] = None,
+        cloudformation_parameters: dict | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -90,7 +92,7 @@ class CloudFormationDeleteStackOperator(BaseOperator):
         self.stack_name = stack_name
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.log.info('CloudFormation Parameters: %s', self.cloudformation_parameters)
 
         cloudformation_hook = CloudFormationHook(aws_conn_id=self.aws_conn_id)
diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py
index 84b939c46c..841a086c56 100644
--- a/airflow/providers/amazon/aws/operators/datasync.py
+++ b/airflow/providers/amazon/aws/operators/datasync.py
@@ -14,12 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """Create, get, update, execute and delete an AWS DataSync Task."""
+from __future__ import annotations
 
 import logging
 import random
-from typing import TYPE_CHECKING, List, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException, AirflowTaskTimeout
 from airflow.models import BaseOperator
@@ -123,16 +123,16 @@ class DataSyncOperator(BaseOperator):
         wait_interval_seconds: int = 30,
         max_iterations: int = 60,
         wait_for_completion: bool = True,
-        task_arn: Optional[str] = None,
-        source_location_uri: Optional[str] = None,
-        destination_location_uri: Optional[str] = None,
+        task_arn: str | None = None,
+        source_location_uri: str | None = None,
+        destination_location_uri: str | None = None,
         allow_random_task_choice: bool = False,
         allow_random_location_choice: bool = False,
-        create_task_kwargs: Optional[dict] = None,
-        create_source_location_kwargs: Optional[dict] = None,
-        create_destination_location_kwargs: Optional[dict] = None,
-        update_task_kwargs: Optional[dict] = None,
-        task_execution_kwargs: Optional[dict] = None,
+        create_task_kwargs: dict | None = None,
+        create_source_location_kwargs: dict | None = None,
+        create_destination_location_kwargs: dict | None = None,
+        update_task_kwargs: dict | None = None,
+        task_execution_kwargs: dict | None = None,
         delete_task_after_execution: bool = False,
         **kwargs,
     ):
@@ -177,16 +177,16 @@ class DataSyncOperator(BaseOperator):
             )
 
         # Others
-        self.hook: Optional[DataSyncHook] = None
+        self.hook: DataSyncHook | None = None
         # Candidates - these are found in AWS as possible things
         # for us to use
-        self.candidate_source_location_arns: Optional[List[str]] = None
-        self.candidate_destination_location_arns: Optional[List[str]] = None
-        self.candidate_task_arns: Optional[List[str]] = None
+        self.candidate_source_location_arns: list[str] | None = None
+        self.candidate_destination_location_arns: list[str] | None = None
+        self.candidate_task_arns: list[str] | None = None
         # Actuals
-        self.source_location_arn: Optional[str] = None
-        self.destination_location_arn: Optional[str] = None
-        self.task_execution_arn: Optional[str] = None
+        self.source_location_arn: str | None = None
+        self.destination_location_arn: str | None = None
+        self.task_execution_arn: str | None = None
 
     def get_hook(self) -> DataSyncHook:
         """Create and return DataSyncHook.
@@ -202,7 +202,7 @@ class DataSyncOperator(BaseOperator):
         )
         return self.hook
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         # If task_arn was not specified then try to
         # find 0, 1 or many candidate DataSync Tasks to run
         if not self.task_arn:
@@ -260,7 +260,7 @@ class DataSyncOperator(BaseOperator):
         )
         self.log.info("Found candidate DataSync TaskArns %s", self.candidate_task_arns)
 
-    def choose_task(self, task_arn_list: list) -> Optional[str]:
+    def choose_task(self, task_arn_list: list) -> str | None:
         """Select 1 DataSync TaskArn from a list"""
         if not task_arn_list:
             return None
@@ -274,7 +274,7 @@ class DataSyncOperator(BaseOperator):
             return random.choice(task_arn_list)
         raise AirflowException(f"Unable to choose a Task from {task_arn_list}")
 
-    def choose_location(self, location_arn_list: Optional[List[str]]) -> Optional[str]:
+    def choose_location(self, location_arn_list: list[str] | None) -> str | None:
         """Select 1 DataSync LocationArn from a list"""
         if not location_arn_list:
             return None
@@ -398,7 +398,7 @@ class DataSyncOperator(BaseOperator):
         hook.delete_task(self.task_arn)
         self.log.info("Task Deleted")
 
-    def _get_location_arns(self, location_uri) -> List[str]:
+    def _get_location_arns(self, location_uri) -> list[str]:
         location_arns = self.get_hook().get_location_arns(location_uri)
         self.log.info("Found LocationArns %s for LocationUri %s", location_arns, location_uri)
         return location_arns
diff --git a/airflow/providers/amazon/aws/operators/dms.py b/airflow/providers/amazon/aws/operators/dms.py
index aca515cfed..8e1738aea1 100644
--- a/airflow/providers/amazon/aws/operators/dms.py
+++ b/airflow/providers/amazon/aws/operators/dms.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Dict, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
@@ -70,7 +72,7 @@ class DmsCreateTaskOperator(BaseOperator):
         replication_instance_arn: str,
         table_mappings: dict,
         migration_type: str = 'full-load',
-        create_task_kwargs: Optional[dict] = None,
+        create_task_kwargs: dict | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -84,7 +86,7 @@ class DmsCreateTaskOperator(BaseOperator):
         self.create_task_kwargs = create_task_kwargs or {}
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Creates AWS DMS replication task from Airflow
 
@@ -124,12 +126,12 @@ class DmsDeleteTaskOperator(BaseOperator):
 
     template_fields: Sequence[str] = ('replication_task_arn',)
     template_ext: Sequence[str] = ()
-    template_fields_renderers: Dict[str, str] = {}
+    template_fields_renderers: dict[str, str] = {}
 
     def __init__(
         self,
         *,
-        replication_task_arn: Optional[str] = None,
+        replication_task_arn: str | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -137,7 +139,7 @@ class DmsDeleteTaskOperator(BaseOperator):
         self.replication_task_arn = replication_task_arn
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Deletes AWS DMS replication task from Airflow
 
@@ -166,12 +168,12 @@ class DmsDescribeTasksOperator(BaseOperator):
 
     template_fields: Sequence[str] = ('describe_tasks_kwargs',)
     template_ext: Sequence[str] = ()
-    template_fields_renderers: Dict[str, str] = {'describe_tasks_kwargs': 'json'}
+    template_fields_renderers: dict[str, str] = {'describe_tasks_kwargs': 'json'}
 
     def __init__(
         self,
         *,
-        describe_tasks_kwargs: Optional[dict] = None,
+        describe_tasks_kwargs: dict | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -179,7 +181,7 @@ class DmsDescribeTasksOperator(BaseOperator):
         self.describe_tasks_kwargs = describe_tasks_kwargs or {}
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Describes AWS DMS replication tasks from Airflow
 
@@ -222,7 +224,7 @@ class DmsStartTaskOperator(BaseOperator):
         *,
         replication_task_arn: str,
         start_replication_task_type: str = 'start-replication',
-        start_task_kwargs: Optional[dict] = None,
+        start_task_kwargs: dict | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -232,7 +234,7 @@ class DmsStartTaskOperator(BaseOperator):
         self.start_task_kwargs = start_task_kwargs or {}
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Starts AWS DMS replication task from Airflow
 
@@ -266,12 +268,12 @@ class DmsStopTaskOperator(BaseOperator):
 
     template_fields: Sequence[str] = ('replication_task_arn',)
     template_ext: Sequence[str] = ()
-    template_fields_renderers: Dict[str, str] = {}
+    template_fields_renderers: dict[str, str] = {}
 
     def __init__(
         self,
         *,
-        replication_task_arn: Optional[str] = None,
+        replication_task_arn: str | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -279,7 +281,7 @@ class DmsStopTaskOperator(BaseOperator):
         self.replication_task_arn = replication_task_arn
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Stops AWS DMS replication task from Airflow
 
diff --git a/airflow/providers/amazon/aws/operators/ec2.py b/airflow/providers/amazon/aws/operators/ec2.py
index 133596929e..60cb43a32d 100644
--- a/airflow/providers/amazon/aws/operators/ec2.py
+++ b/airflow/providers/amazon/aws/operators/ec2.py
@@ -15,9 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
@@ -50,7 +50,7 @@ class EC2StartInstanceOperator(BaseOperator):
         *,
         instance_id: str,
         aws_conn_id: str = "aws_default",
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         check_interval: float = 15,
         **kwargs,
     ):
@@ -60,7 +60,7 @@ class EC2StartInstanceOperator(BaseOperator):
         self.region_name = region_name
         self.check_interval = check_interval
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         ec2_hook = EC2Hook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
         self.log.info("Starting EC2 instance %s", self.instance_id)
         instance = ec2_hook.get_instance(instance_id=self.instance_id)
@@ -96,7 +96,7 @@ class EC2StopInstanceOperator(BaseOperator):
         *,
         instance_id: str,
         aws_conn_id: str = "aws_default",
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         check_interval: float = 15,
         **kwargs,
     ):
@@ -106,7 +106,7 @@ class EC2StopInstanceOperator(BaseOperator):
         self.region_name = region_name
         self.check_interval = check_interval
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         ec2_hook = EC2Hook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
         self.log.info("Stopping EC2 instance %s", self.instance_id)
         instance = ec2_hook.get_instance(instance_id=self.instance_id)
diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py
index 3336ecb9b5..a70c4b2007 100644
--- a/airflow/providers/amazon/aws/operators/ecs.py
+++ b/airflow/providers/amazon/aws/operators/ecs.py
@@ -15,11 +15,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import re
 import sys
 import warnings
 from datetime import timedelta
-from typing import TYPE_CHECKING, Dict, List, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 import boto3
 
@@ -49,9 +51,7 @@ DEFAULT_CONN_ID = 'aws_default'
 class EcsBaseOperator(BaseOperator):
     """This is the base operator for all Elastic Container Service operators."""
 
-    def __init__(
-        self, *, aws_conn_id: Optional[str] = DEFAULT_CONN_ID, region: Optional[str] = None, **kwargs
-    ):
+    def __init__(self, *, aws_conn_id: str | None = DEFAULT_CONN_ID, region: str | None = None, **kwargs):
         self.aws_conn_id = aws_conn_id
         self.region = region
         super().__init__(**kwargs)
@@ -66,7 +66,7 @@ class EcsBaseOperator(BaseOperator):
         """Create and return the EcsHook's client."""
         return self.hook.conn
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """Must overwrite in child classes."""
         raise NotImplementedError('Please implement execute() in subclass')
 
@@ -91,7 +91,7 @@ class EcsCreateClusterOperator(EcsBaseOperator):
         self,
         *,
         cluster_name: str,
-        create_cluster_kwargs: Optional[Dict] = None,
+        create_cluster_kwargs: dict | None = None,
         wait_for_completion: bool = True,
         **kwargs,
     ) -> None:
@@ -100,7 +100,7 @@ class EcsCreateClusterOperator(EcsBaseOperator):
         self.create_cluster_kwargs = create_cluster_kwargs or {}
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.log.info(
             'Creating cluster %s using the following values: %s',
             self.cluster_name,
@@ -145,7 +145,7 @@ class EcsDeleteClusterOperator(EcsBaseOperator):
         self.cluster_name = cluster_name
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.log.info('Deleting cluster %s.', self.cluster_name)
         result = self.client.delete_cluster(cluster=self.cluster_name)
 
@@ -183,7 +183,7 @@ class EcsDeregisterTaskDefinitionOperator(EcsBaseOperator):
         self.task_definition = task_definition
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.log.info('Deregistering task definition %s.', self.task_definition)
         result = self.client.deregister_task_definition(taskDefinition=self.task_definition)
 
@@ -226,8 +226,8 @@ class EcsRegisterTaskDefinitionOperator(EcsBaseOperator):
         self,
         *,
         family: str,
-        container_definitions: List[Dict],
-        register_task_kwargs: Optional[Dict] = None,
+        container_definitions: list[dict],
+        register_task_kwargs: dict | None = None,
         wait_for_completion: bool = True,
         **kwargs,
     ):
@@ -237,7 +237,7 @@ class EcsRegisterTaskDefinitionOperator(EcsBaseOperator):
         self.register_task_kwargs = register_task_kwargs or {}
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.log.info(
             'Registering task definition %s using the following values: %s',
             self.family,
@@ -354,19 +354,19 @@ class EcsRunTaskOperator(EcsBaseOperator):
         cluster: str,
         overrides: dict,
         launch_type: str = 'EC2',
-        capacity_provider_strategy: Optional[list] = None,
-        group: Optional[str] = None,
-        placement_constraints: Optional[list] = None,
-        placement_strategy: Optional[list] = None,
-        platform_version: Optional[str] = None,
-        network_configuration: Optional[dict] = None,
-        tags: Optional[dict] = None,
-        awslogs_group: Optional[str] = None,
-        awslogs_region: Optional[str] = None,
-        awslogs_stream_prefix: Optional[str] = None,
+        capacity_provider_strategy: list | None = None,
+        group: str | None = None,
+        placement_constraints: list | None = None,
+        placement_strategy: list | None = None,
+        platform_version: str | None = None,
+        network_configuration: dict | None = None,
+        tags: dict | None = None,
+        awslogs_group: str | None = None,
+        awslogs_region: str | None = None,
+        awslogs_stream_prefix: str | None = None,
         awslogs_fetch_interval: timedelta = timedelta(seconds=30),
-        propagate_tags: Optional[str] = None,
-        quota_retry: Optional[dict] = None,
+        propagate_tags: str | None = None,
+        quota_retry: dict | None = None,
         reattach: bool = False,
         number_logs_exception: int = 10,
         wait_for_completion: bool = True,
@@ -397,9 +397,9 @@ class EcsRunTaskOperator(EcsBaseOperator):
         if self.awslogs_region is None:
             self.awslogs_region = self.region
 
-        self.arn: Optional[str] = None
+        self.arn: str | None = None
         self.retry_args = quota_retry
-        self.task_log_fetcher: Optional[EcsTaskLogFetcher] = None
+        self.task_log_fetcher: EcsTaskLogFetcher | None = None
         self.wait_for_completion = wait_for_completion
 
     @provide_session
diff --git a/airflow/providers/amazon/aws/operators/eks.py b/airflow/providers/amazon/aws/operators/eks.py
index fb1870c187..bf24b5f1e8 100644
--- a/airflow/providers/amazon/aws/operators/eks.py
+++ b/airflow/providers/amazon/aws/operators/eks.py
@@ -14,12 +14,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains Amazon EKS operators."""
+from __future__ import annotations
+
 import warnings
 from ast import literal_eval
 from time import sleep
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union, cast
+from typing import TYPE_CHECKING, Any, List, Sequence, cast
 
 from airflow import AirflowException
 from airflow.models import BaseOperator
@@ -124,18 +125,18 @@ class EksCreateClusterOperator(BaseOperator):
         self,
         cluster_name: str,
         cluster_role_arn: str,
-        resources_vpc_config: Dict[str, Any],
-        compute: Optional[str] = DEFAULT_COMPUTE_TYPE,
-        create_cluster_kwargs: Optional[Dict] = None,
+        resources_vpc_config: dict[str, Any],
+        compute: str | None = DEFAULT_COMPUTE_TYPE,
+        create_cluster_kwargs: dict | None = None,
         nodegroup_name: str = DEFAULT_NODEGROUP_NAME,
-        nodegroup_role_arn: Optional[str] = None,
-        create_nodegroup_kwargs: Optional[Dict] = None,
+        nodegroup_role_arn: str | None = None,
+        create_nodegroup_kwargs: dict | None = None,
         fargate_profile_name: str = DEFAULT_FARGATE_PROFILE_NAME,
-        fargate_pod_execution_role_arn: Optional[str] = None,
-        fargate_selectors: Optional[List] = None,
-        create_fargate_profile_kwargs: Optional[Dict] = None,
+        fargate_pod_execution_role_arn: str | None = None,
+        fargate_selectors: list | None = None,
+        create_fargate_profile_kwargs: dict | None = None,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         self.compute = compute
@@ -154,7 +155,7 @@ class EksCreateClusterOperator(BaseOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         if self.compute:
             if self.compute not in SUPPORTED_COMPUTE_VALUES:
                 raise ValueError("Provided compute type is not supported.")
@@ -260,12 +261,12 @@ class EksCreateNodegroupOperator(BaseOperator):
     def __init__(
         self,
         cluster_name: str,
-        nodegroup_subnets: Union[List[str], str],
+        nodegroup_subnets: list[str] | str,
         nodegroup_role_arn: str,
         nodegroup_name: str = DEFAULT_NODEGROUP_NAME,
-        create_nodegroup_kwargs: Optional[Dict] = None,
+        create_nodegroup_kwargs: dict | None = None,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         self.cluster_name = cluster_name
@@ -277,9 +278,9 @@ class EksCreateNodegroupOperator(BaseOperator):
         self.nodegroup_subnets = nodegroup_subnets
         super().__init__(**kwargs)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         if isinstance(self.nodegroup_subnets, str):
-            nodegroup_subnets_list: List[str] = []
+            nodegroup_subnets_list: list[str] = []
             if self.nodegroup_subnets != "":
                 try:
                     nodegroup_subnets_list = cast(List, literal_eval(self.nodegroup_subnets))
@@ -343,11 +344,11 @@ class EksCreateFargateProfileOperator(BaseOperator):
         self,
         cluster_name: str,
         pod_execution_role_arn: str,
-        selectors: List,
-        fargate_profile_name: Optional[str] = DEFAULT_FARGATE_PROFILE_NAME,
-        create_fargate_profile_kwargs: Optional[Dict] = None,
+        selectors: list,
+        fargate_profile_name: str | None = DEFAULT_FARGATE_PROFILE_NAME,
+        create_fargate_profile_kwargs: dict | None = None,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         self.cluster_name = cluster_name
@@ -359,7 +360,7 @@ class EksCreateFargateProfileOperator(BaseOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
@@ -407,7 +408,7 @@ class EksDeleteClusterOperator(BaseOperator):
         cluster_name: str,
         force_delete_compute: bool = False,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         self.cluster_name = cluster_name
@@ -416,7 +417,7 @@ class EksDeleteClusterOperator(BaseOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
@@ -527,7 +528,7 @@ class EksDeleteNodegroupOperator(BaseOperator):
         cluster_name: str,
         nodegroup_name: str,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         self.cluster_name = cluster_name
@@ -536,7 +537,7 @@ class EksDeleteNodegroupOperator(BaseOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
@@ -576,7 +577,7 @@ class EksDeleteFargateProfileOperator(BaseOperator):
         cluster_name: str,
         fargate_profile_name: str,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -585,7 +586,7 @@ class EksDeleteFargateProfileOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.region = region
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
@@ -645,12 +646,12 @@ class EksPodOperator(KubernetesPodOperator):
         # file is stored locally in the worker and not in the cluster.
         in_cluster: bool = False,
         namespace: str = DEFAULT_NAMESPACE_NAME,
-        pod_context: Optional[str] = None,
-        pod_name: Optional[str] = None,
-        pod_username: Optional[str] = None,
+        pod_context: str | None = None,
+        pod_name: str | None = None,
+        pod_username: str | None = None,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
-        is_delete_operator_pod: Optional[bool] = None,
+        region: str | None = None,
+        is_delete_operator_pod: bool | None = None,
         **kwargs,
     ) -> None:
         if is_delete_operator_pod is None:
@@ -682,7 +683,7 @@ class EksPodOperator(KubernetesPodOperator):
         if self.config_file:
             raise AirflowException("The config_file is not an allowed parameter for the EksPodOperator.")
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
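
    The EKS hunk above (`nodegroup_subnets: list[str] | str`) is a reminder that the new
    annotations are purely static: accepting a union still means branching on `isinstance`
    at runtime, exactly as the operator's `execute` does with `literal_eval`. A small
    hypothetical helper, loosely modelled on that branch, just to show the pattern:

        from __future__ import annotations

        from ast import literal_eval


        def normalize_subnets(nodegroup_subnets: list[str] | str) -> list[str]:
            if isinstance(nodegroup_subnets, str):
                if not nodegroup_subnets:
                    return []
                try:
                    # e.g. a templated value such as "['subnet-a', 'subnet-b']"
                    return [str(s) for s in literal_eval(nodegroup_subnets)]
                except (ValueError, SyntaxError):
                    # fall back to treating the string as a single subnet id
                    return [nodegroup_subnets]
            return list(nodegroup_subnets)


        print(normalize_subnets("['subnet-a', 'subnet-b']"))  # ['subnet-a', 'subnet-b']
        print(normalize_subnets(["subnet-c"]))                 # ['subnet-c']
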
diff --git a/airflow/providers/amazon/aws/operators/emr.py b/airflow/providers/amazon/aws/operators/emr.py
index f06f3834e9..5028dfed86 100644
--- a/airflow/providers/amazon/aws/operators/emr.py
+++ b/airflow/providers/amazon/aws/operators/emr.py
@@ -15,9 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import ast
 import warnings
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Any, Sequence
 from uuid import uuid4
 
 from airflow.exceptions import AirflowException
@@ -61,11 +63,11 @@ class EmrAddStepsOperator(BaseOperator):
     def __init__(
         self,
         *,
-        job_flow_id: Optional[str] = None,
-        job_flow_name: Optional[str] = None,
-        cluster_states: Optional[List[str]] = None,
+        job_flow_id: str | None = None,
+        job_flow_name: str | None = None,
+        cluster_states: list[str] | None = None,
         aws_conn_id: str = 'aws_default',
-        steps: Optional[Union[List[dict], str]] = None,
+        steps: list[dict] | str | None = None,
         **kwargs,
     ):
         if not (job_flow_id is None) ^ (job_flow_name is None):
@@ -79,7 +81,7 @@ class EmrAddStepsOperator(BaseOperator):
         self.cluster_states = cluster_states
         self.steps = steps
 
-    def execute(self, context: 'Context') -> List[str]:
+    def execute(self, context: Context) -> list[str]:
         emr_hook = EmrHook(aws_conn_id=self.aws_conn_id)
 
         emr = emr_hook.get_conn()
@@ -151,7 +153,7 @@ class EmrEksCreateClusterOperator(BaseOperator):
         eks_namespace: str,
         virtual_cluster_id: str = '',
         aws_conn_id: str = "aws_default",
-        tags: Optional[dict] = None,
+        tags: dict | None = None,
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
@@ -167,7 +169,7 @@ class EmrEksCreateClusterOperator(BaseOperator):
         """Create and return an EmrContainerHook."""
         return EmrContainerHook(self.aws_conn_id)
 
-    def execute(self, context: 'Context') -> Optional[str]:
+    def execute(self, context: Context) -> str | None:
         """Create EMR on EKS virtual Cluster"""
         self.virtual_cluster_id = self.hook.create_emr_on_eks_cluster(
             self.virtual_cluster_name, self.eks_cluster_name, self.eks_namespace, self.tags
@@ -220,14 +222,14 @@ class EmrContainerOperator(BaseOperator):
         execution_role_arn: str,
         release_label: str,
         job_driver: dict,
-        configuration_overrides: Optional[dict] = None,
-        client_request_token: Optional[str] = None,
+        configuration_overrides: dict | None = None,
+        client_request_token: str | None = None,
         aws_conn_id: str = "aws_default",
         wait_for_completion: bool = True,
         poll_interval: int = 30,
-        max_tries: Optional[int] = None,
-        tags: Optional[dict] = None,
-        max_polling_attempts: Optional[int] = None,
+        max_tries: int | None = None,
+        tags: dict | None = None,
+        max_polling_attempts: int | None = None,
         **kwargs: Any,
     ) -> None:
         super().__init__(**kwargs)
@@ -243,7 +245,7 @@ class EmrContainerOperator(BaseOperator):
         self.poll_interval = poll_interval
         self.max_polling_attempts = max_polling_attempts
         self.tags = tags
-        self.job_id: Optional[str] = None
+        self.job_id: str | None = None
 
         if max_tries:
             warnings.warn(
@@ -265,7 +267,7 @@ class EmrContainerOperator(BaseOperator):
             virtual_cluster_id=self.virtual_cluster_id,
         )
 
-    def execute(self, context: 'Context') -> Optional[str]:
+    def execute(self, context: Context) -> str | None:
         """Run job on EMR Containers"""
         self.job_id = self.hook.submit_job(
             self.name,
@@ -349,8 +351,8 @@ class EmrCreateJobFlowOperator(BaseOperator):
         *,
         aws_conn_id: str = 'aws_default',
         emr_conn_id: str = 'emr_default',
-        job_flow_overrides: Optional[Union[str, Dict[str, Any]]] = None,
-        region_name: Optional[str] = None,
+        job_flow_overrides: str | dict[str, Any] | None = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -361,7 +363,7 @@ class EmrCreateJobFlowOperator(BaseOperator):
         self.job_flow_overrides = job_flow_overrides
         self.region_name = region_name
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         emr = EmrHook(
             aws_conn_id=self.aws_conn_id, emr_conn_id=self.emr_conn_id, region_name=self.region_name
         )
@@ -370,7 +372,7 @@ class EmrCreateJobFlowOperator(BaseOperator):
             'Creating JobFlow using aws-conn-id: %s, emr-conn-id: %s', self.aws_conn_id, self.emr_conn_id
         )
         if isinstance(self.job_flow_overrides, str):
-            job_flow_overrides: Dict[str, Any] = ast.literal_eval(self.job_flow_overrides)
+            job_flow_overrides: dict[str, Any] = ast.literal_eval(self.job_flow_overrides)
             self.job_flow_overrides = job_flow_overrides
         else:
             job_flow_overrides = self.job_flow_overrides
@@ -418,7 +420,7 @@ class EmrModifyClusterOperator(BaseOperator):
         self.cluster_id = cluster_id
         self.step_concurrency_level = step_concurrency_level
 
-    def execute(self, context: 'Context') -> int:
+    def execute(self, context: Context) -> int:
         emr_hook = EmrHook(aws_conn_id=self.aws_conn_id)
         emr = emr_hook.get_conn()
 
@@ -467,7 +469,7 @@ class EmrTerminateJobFlowOperator(BaseOperator):
         self.job_flow_id = job_flow_id
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         emr_hook = EmrHook(aws_conn_id=self.aws_conn_id)
         emr = emr_hook.get_conn()
 
@@ -510,7 +512,7 @@ class EmrServerlessCreateApplicationOperator(BaseOperator):
         release_label: str,
         job_type: str,
         client_request_token: str = '',
-        config: Optional[dict] = None,
+        config: dict | None = None,
         wait_for_completion: bool = True,
         aws_conn_id: str = 'aws_default',
         **kwargs,
@@ -530,7 +532,7 @@ class EmrServerlessCreateApplicationOperator(BaseOperator):
         """Create and return an EmrServerlessHook."""
         return EmrServerlessHook(aws_conn_id=self.aws_conn_id)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         response = self.hook.conn.create_application(
             clientToken=self.client_request_token,
             releaseLabel=self.release_label,
@@ -604,9 +606,9 @@ class EmrServerlessStartJobOperator(BaseOperator):
         application_id: str,
         execution_role_arn: str,
         job_driver: dict,
-        configuration_overrides: Optional[dict],
+        configuration_overrides: dict | None,
         client_request_token: str = '',
-        config: Optional[dict] = None,
+        config: dict | None = None,
         wait_for_completion: bool = True,
         aws_conn_id: str = 'aws_default',
         **kwargs,
@@ -627,7 +629,7 @@ class EmrServerlessStartJobOperator(BaseOperator):
         """Create and return an EmrServerlessHook."""
         return EmrServerlessHook(aws_conn_id=self.aws_conn_id)
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.log.info('Starting job on Application: %s', self.application_id)
 
         app_state = self.hook.conn.get_application(applicationId=self.application_id)['application']['state']
@@ -706,7 +708,7 @@ class EmrServerlessDeleteApplicationOperator(BaseOperator):
         """Create and return an EmrServerlessHook."""
         return EmrServerlessHook(aws_conn_id=self.aws_conn_id)
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         self.log.info('Stopping application: %s', self.application_id)
         self.hook.conn.stop_application(applicationId=self.application_id)
 
diff --git a/airflow/providers/amazon/aws/operators/glacier.py b/airflow/providers/amazon/aws/operators/glacier.py
index 337492a452..6d96a5de7f 100644
--- a/airflow/providers/amazon/aws/operators/glacier.py
+++ b/airflow/providers/amazon/aws/operators/glacier.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
@@ -49,6 +51,6 @@ class GlacierCreateJobOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.vault_name = vault_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         hook = GlacierHook(aws_conn_id=self.aws_conn_id)
         return hook.retrieve_inventory(vault_name=self.vault_name)
diff --git a/airflow/providers/amazon/aws/operators/glue.py b/airflow/providers/amazon/aws/operators/glue.py
index 48b201ba9c..dc631bdb96 100644
--- a/airflow/providers/amazon/aws/operators/glue.py
+++ b/airflow/providers/amazon/aws/operators/glue.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import os.path
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.glue import GlueJobHook
@@ -72,17 +73,17 @@ class GlueJobOperator(BaseOperator):
         *,
         job_name: str = 'aws_glue_default_job',
         job_desc: str = 'AWS Glue Job with Airflow',
-        script_location: Optional[str] = None,
-        concurrent_run_limit: Optional[int] = None,
-        script_args: Optional[dict] = None,
+        script_location: str | None = None,
+        concurrent_run_limit: int | None = None,
+        script_args: dict | None = None,
         retry_limit: int = 0,
-        num_of_dpus: Optional[int] = None,
+        num_of_dpus: int | None = None,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
-        s3_bucket: Optional[str] = None,
-        iam_role_name: Optional[str] = None,
-        create_job_kwargs: Optional[dict] = None,
-        run_job_kwargs: Optional[dict] = None,
+        region_name: str | None = None,
+        s3_bucket: str | None = None,
+        iam_role_name: str | None = None,
+        create_job_kwargs: dict | None = None,
+        run_job_kwargs: dict | None = None,
         wait_for_completion: bool = True,
         verbose: bool = False,
         **kwargs,
@@ -106,7 +107,7 @@ class GlueJobOperator(BaseOperator):
         self.wait_for_completion = wait_for_completion
         self.verbose = verbose
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Executes AWS Glue Job from Airflow
 
diff --git a/airflow/providers/amazon/aws/operators/glue_crawler.py b/airflow/providers/amazon/aws/operators/glue_crawler.py
index b7968d72aa..4bec7927db 100644
--- a/airflow/providers/amazon/aws/operators/glue_crawler.py
+++ b/airflow/providers/amazon/aws/operators/glue_crawler.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from typing import TYPE_CHECKING, Sequence
 
 if TYPE_CHECKING:
@@ -63,7 +65,7 @@ class GlueCrawlerOperator(BaseOperator):
         """Create and return an GlueCrawlerHook."""
         return GlueCrawlerHook(self.aws_conn_id)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Executes AWS Glue Crawler from Airflow
 
diff --git a/airflow/providers/amazon/aws/operators/lambda_function.py b/airflow/providers/amazon/aws/operators/lambda_function.py
index c2d9d022fb..1796702b88 100644
--- a/airflow/providers/amazon/aws/operators/lambda_function.py
+++ b/airflow/providers/amazon/aws/operators/lambda_function.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook
@@ -54,11 +55,11 @@ class AwsLambdaInvokeFunctionOperator(BaseOperator):
         self,
         *,
         function_name: str,
-        log_type: Optional[str] = None,
-        qualifier: Optional[str] = None,
-        invocation_type: Optional[str] = None,
-        client_context: Optional[str] = None,
-        payload: Optional[str] = None,
+        log_type: str | None = None,
+        qualifier: str | None = None,
+        invocation_type: str | None = None,
+        client_context: str | None = None,
+        payload: str | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -71,7 +72,7 @@ class AwsLambdaInvokeFunctionOperator(BaseOperator):
         self.client_context = client_context
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Invokes the target AWS Lambda function from Airflow.
 
diff --git a/airflow/providers/amazon/aws/operators/quicksight.py b/airflow/providers/amazon/aws/operators/quicksight.py
index 46cde58d79..85514af806 100644
--- a/airflow/providers/amazon/aws/operators/quicksight.py
+++ b/airflow/providers/amazon/aws/operators/quicksight.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook
@@ -71,7 +72,7 @@ class QuickSightCreateIngestionOperator(BaseOperator):
         wait_for_completion: bool = True,
         check_interval: int = 30,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ):
         self.data_set_id = data_set_id
@@ -83,7 +84,7 @@ class QuickSightCreateIngestionOperator(BaseOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def execute(self, context: "Context"):
+    def execute(self, context: Context):
         hook = QuickSightHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
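
The hunks above and below repeat one mechanical pattern, so a single illustration suffices. The sketch below is not the real GlueJobOperator (the class name, fields and values are made up) and only assumes a Python 3.7+ interpreter: with "from __future__ import annotations", every annotation is stored as a string instead of being evaluated, which is what makes the PEP 604 unions (str | None), the PEP 585 builtin generics (dict, list[str]) and the unquoted Context forward reference (imported only under TYPE_CHECKING) safe spellings at runtime.

    from __future__ import annotations  # PEP 563: annotations are stored as strings

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only type checkers see this import; it is skipped at runtime.
        from airflow.utils.context import Context


    class ExampleOperator:
        """Illustrative stand-in for the operators changed in this commit."""

        def __init__(self, script_location: str | None = None, script_args: dict | None = None):
            # 'str | None' (PEP 604) and bare 'dict' (PEP 585) would fail if
            # evaluated on Python 3.7/3.8, but PEP 563 keeps them as strings.
            self.script_location = script_location
            self.script_args = script_args or {}

        def execute(self, context: Context) -> None:
            # No quotes needed around Context any more: the annotation is never
            # evaluated, so the TYPE_CHECKING-only import is enough.
            print("running", self.script_location, "with", self.script_args)


    ExampleOperator(script_location="s3://bucket/etl.py").execute(context=None)

Running this module never imports airflow.utils.context, because the TYPE_CHECKING block is skipped at runtime; the type checker still resolves Context normally.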
diff --git a/airflow/providers/amazon/aws/operators/rds.py b/airflow/providers/amazon/aws/operators/rds.py
index 5aac2f485e..e79086c2da 100644
--- a/airflow/providers/amazon/aws/operators/rds.py
+++ b/airflow/providers/amazon/aws/operators/rds.py
@@ -15,10 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import json
 import time
-from typing import TYPE_CHECKING, Dict, List, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from mypy_boto3_rds.type_defs import TagTypeDef
 
@@ -37,7 +38,7 @@ class RdsBaseOperator(BaseOperator):
     ui_color = "#eeaa88"
     ui_fgcolor = "#ffffff"
 
-    def __init__(self, *args, aws_conn_id: str = "aws_conn_id", hook_params: Optional[dict] = None, **kwargs):
+    def __init__(self, *args, aws_conn_id: str = "aws_conn_id", hook_params: dict | None = None, **kwargs):
         hook_params = hook_params or {}
         self.hook = RdsHook(aws_conn_id=aws_conn_id, **hook_params)
         super().__init__(*args, **kwargs)
@@ -65,9 +66,9 @@ class RdsBaseOperator(BaseOperator):
         self,
         item_type: str,
         item_name: str,
-        wait_statuses: Optional[List[str]] = None,
-        ok_statuses: Optional[List[str]] = None,
-        error_statuses: Optional[List[str]] = None,
+        wait_statuses: list[str] | None = None,
+        ok_statuses: list[str] | None = None,
+        error_statuses: list[str] | None = None,
     ) -> None:
         """
         Continuously gets item description from `_describe_item()` and waits while:
@@ -94,7 +95,7 @@ class RdsBaseOperator(BaseOperator):
 
         return None
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         """Different implementations for snapshots, tasks and events"""
         raise NotImplementedError
 
@@ -128,7 +129,7 @@ class RdsCreateDbSnapshotOperator(RdsBaseOperator):
         db_type: str,
         db_identifier: str,
         db_snapshot_identifier: str,
-        tags: Optional[Sequence[TagTypeDef]] = None,
+        tags: Sequence[TagTypeDef] | None = None,
         wait_for_completion: bool = True,
         aws_conn_id: str = "aws_conn_id",
         **kwargs,
@@ -140,7 +141,7 @@ class RdsCreateDbSnapshotOperator(RdsBaseOperator):
         self.tags = tags or []
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info(
             "Starting to create snapshot of RDS %s '%s': %s",
             self.db_type,
@@ -215,7 +216,7 @@ class RdsCopyDbSnapshotOperator(RdsBaseOperator):
         source_db_snapshot_identifier: str,
         target_db_snapshot_identifier: str,
         kms_key_id: str = "",
-        tags: Optional[Sequence[TagTypeDef]] = None,
+        tags: Sequence[TagTypeDef] | None = None,
         copy_tags: bool = False,
         pre_signed_url: str = "",
         option_group_name: str = "",
@@ -239,7 +240,7 @@ class RdsCopyDbSnapshotOperator(RdsBaseOperator):
         self.source_region = source_region
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info(
             "Starting to copy snapshot '%s' as '%s'",
             self.source_db_snapshot_identifier,
@@ -311,7 +312,7 @@ class RdsDeleteDbSnapshotOperator(RdsBaseOperator):
         self.db_type = RdsDbType(db_type)
         self.db_snapshot_identifier = db_snapshot_identifier
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info("Starting to delete snapshot '%s'", self.db_snapshot_identifier)
 
         if self.db_type.value == "instance":
@@ -365,7 +366,7 @@ class RdsStartExportTaskOperator(RdsBaseOperator):
         iam_role_arn: str,
         kms_key_id: str,
         s3_prefix: str = '',
-        export_only: Optional[List[str]] = None,
+        export_only: list[str] | None = None,
         wait_for_completion: bool = True,
         aws_conn_id: str = "aws_default",
         **kwargs,
@@ -381,7 +382,7 @@ class RdsStartExportTaskOperator(RdsBaseOperator):
         self.export_only = export_only or []
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info("Starting export task %s for snapshot %s", self.export_task_identifier, self.source_arn)
 
         start_export = self.hook.conn.start_export_task(
@@ -433,7 +434,7 @@ class RdsCancelExportTaskOperator(RdsBaseOperator):
         self.export_task_identifier = export_task_identifier
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info("Canceling export task %s", self.export_task_identifier)
 
         cancel_export = self.hook.conn.cancel_export_task(
@@ -487,10 +488,10 @@ class RdsCreateEventSubscriptionOperator(RdsBaseOperator):
         subscription_name: str,
         sns_topic_arn: str,
         source_type: str = "",
-        event_categories: Optional[Sequence[str]] = None,
-        source_ids: Optional[Sequence[str]] = None,
+        event_categories: Sequence[str] | None = None,
+        source_ids: Sequence[str] | None = None,
         enabled: bool = True,
-        tags: Optional[Sequence[TagTypeDef]] = None,
+        tags: Sequence[TagTypeDef] | None = None,
         wait_for_completion: bool = True,
         aws_conn_id: str = "aws_default",
         **kwargs,
@@ -506,7 +507,7 @@ class RdsCreateEventSubscriptionOperator(RdsBaseOperator):
         self.tags = tags or []
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info("Creating event subscription '%s' to '%s'", self.subscription_name, self.sns_topic_arn)
 
         create_subscription = self.hook.conn.create_event_subscription(
@@ -554,7 +555,7 @@ class RdsDeleteEventSubscriptionOperator(RdsBaseOperator):
 
         self.subscription_name = subscription_name
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info(
             "Deleting event subscription %s",
             self.subscription_name,
@@ -593,7 +594,7 @@ class RdsCreateDbInstanceOperator(RdsBaseOperator):
         db_instance_identifier: str,
         db_instance_class: str,
         engine: str,
-        rds_kwargs: Optional[Dict] = None,
+        rds_kwargs: dict | None = None,
         aws_conn_id: str = "aws_default",
         wait_for_completion: bool = True,
         **kwargs,
@@ -606,7 +607,7 @@ class RdsCreateDbInstanceOperator(RdsBaseOperator):
         self.rds_kwargs = rds_kwargs or {}
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info("Creating new DB instance %s", self.db_instance_identifier)
 
         create_db_instance = self.hook.conn.create_db_instance(
@@ -645,7 +646,7 @@ class RdsDeleteDbInstanceOperator(RdsBaseOperator):
         self,
         *,
         db_instance_identifier: str,
-        rds_kwargs: Optional[Dict] = None,
+        rds_kwargs: dict | None = None,
         aws_conn_id: str = "aws_default",
         wait_for_completion: bool = True,
         **kwargs,
@@ -655,7 +656,7 @@ class RdsDeleteDbInstanceOperator(RdsBaseOperator):
         self.rds_kwargs = rds_kwargs or {}
         self.wait_for_completion = wait_for_completion
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         self.log.info("Deleting DB instance %s", self.db_instance_identifier)
 
         delete_db_instance = self.hook.conn.delete_db_instance(
diff --git a/airflow/providers/amazon/aws/operators/redshift_cluster.py b/airflow/providers/amazon/aws/operators/redshift_cluster.py
index 639ef6ef0c..ba49434cab 100644
--- a/airflow/providers/amazon/aws/operators/redshift_cluster.py
+++ b/airflow/providers/amazon/aws/operators/redshift_cluster.py
@@ -14,8 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import time
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -103,32 +105,32 @@ class RedshiftCreateClusterOperator(BaseOperator):
         cluster_type: str = "multi-node",
         db_name: str = "dev",
         number_of_nodes: int = 1,
-        cluster_security_groups: Optional[List[str]] = None,
-        vpc_security_group_ids: Optional[List[str]] = None,
-        cluster_subnet_group_name: Optional[str] = None,
-        availability_zone: Optional[str] = None,
-        preferred_maintenance_window: Optional[str] = None,
-        cluster_parameter_group_name: Optional[str] = None,
+        cluster_security_groups: list[str] | None = None,
+        vpc_security_group_ids: list[str] | None = None,
+        cluster_subnet_group_name: str | None = None,
+        availability_zone: str | None = None,
+        preferred_maintenance_window: str | None = None,
+        cluster_parameter_group_name: str | None = None,
         automated_snapshot_retention_period: int = 1,
-        manual_snapshot_retention_period: Optional[int] = None,
+        manual_snapshot_retention_period: int | None = None,
         port: int = 5439,
         cluster_version: str = "1.0",
         allow_version_upgrade: bool = True,
         publicly_accessible: bool = True,
         encrypted: bool = False,
-        hsm_client_certificate_identifier: Optional[str] = None,
-        hsm_configuration_identifier: Optional[str] = None,
-        elastic_ip: Optional[str] = None,
-        tags: Optional[List[Any]] = None,
-        kms_key_id: Optional[str] = None,
+        hsm_client_certificate_identifier: str | None = None,
+        hsm_configuration_identifier: str | None = None,
+        elastic_ip: str | None = None,
+        tags: list[Any] | None = None,
+        kms_key_id: str | None = None,
         enhanced_vpc_routing: bool = False,
-        additional_info: Optional[str] = None,
-        iam_roles: Optional[List[str]] = None,
-        maintenance_track_name: Optional[str] = None,
-        snapshot_schedule_identifier: Optional[str] = None,
-        availability_zone_relocation: Optional[bool] = None,
-        aqua_configuration_status: Optional[str] = None,
-        default_iam_role_arn: Optional[str] = None,
+        additional_info: str | None = None,
+        iam_roles: list[str] | None = None,
+        maintenance_track_name: str | None = None,
+        snapshot_schedule_identifier: str | None = None,
+        availability_zone_relocation: bool | None = None,
+        aqua_configuration_status: str | None = None,
+        default_iam_role_arn: str | None = None,
         aws_conn_id: str = "aws_default",
         **kwargs,
     ):
@@ -169,10 +171,10 @@ class RedshiftCreateClusterOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.kwargs = kwargs
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         redshift_hook = RedshiftHook(aws_conn_id=self.aws_conn_id)
         self.log.info("Creating Redshift cluster %s", self.cluster_identifier)
-        params: Dict[str, Any] = {}
+        params: dict[str, Any] = {}
         if self.db_name:
             params["DBName"] = self.db_name
         if self.cluster_type:
@@ -283,7 +285,7 @@ class RedshiftCreateClusterSnapshotOperator(BaseOperator):
         self.max_attempt = max_attempt
         self.redshift_hook = RedshiftHook(aws_conn_id=aws_conn_id)
 
-    def execute(self, context: "Context") -> Any:
+    def execute(self, context: Context) -> Any:
         cluster_state = self.redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
         if cluster_state != "available":
             raise AirflowException(
@@ -342,7 +344,7 @@ class RedshiftDeleteClusterSnapshotOperator(BaseOperator):
         self.poll_interval = poll_interval
         self.redshift_hook = RedshiftHook(aws_conn_id=aws_conn_id)
 
-    def execute(self, context: "Context") -> Any:
+    def execute(self, context: Context) -> Any:
         self.redshift_hook.get_conn().delete_cluster_snapshot(
             SnapshotClusterIdentifier=self.cluster_identifier,
             SnapshotIdentifier=self.snapshot_identifier,
@@ -386,7 +388,7 @@ class RedshiftResumeClusterOperator(BaseOperator):
         self.cluster_identifier = cluster_identifier
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         redshift_hook = RedshiftHook(aws_conn_id=self.aws_conn_id)
         cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
         if cluster_state == 'paused':
@@ -425,7 +427,7 @@ class RedshiftPauseClusterOperator(BaseOperator):
         self.cluster_identifier = cluster_identifier
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         redshift_hook = RedshiftHook(aws_conn_id=self.aws_conn_id)
         cluster_state = redshift_hook.cluster_status(cluster_identifier=self.cluster_identifier)
         if cluster_state == 'available':
@@ -463,7 +465,7 @@ class RedshiftDeleteClusterOperator(BaseOperator):
         *,
         cluster_identifier: str,
         skip_final_cluster_snapshot: bool = True,
-        final_cluster_snapshot_identifier: Optional[str] = None,
+        final_cluster_snapshot_identifier: str | None = None,
         wait_for_completion: bool = True,
         aws_conn_id: str = "aws_default",
         poll_interval: float = 30.0,
@@ -477,7 +479,7 @@ class RedshiftDeleteClusterOperator(BaseOperator):
         self.redshift_hook = RedshiftHook(aws_conn_id=aws_conn_id)
         self.poll_interval = poll_interval
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.delete_cluster()
 
         if self.wait_for_completion:
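
A side note on the local annotation params: dict[str, Any] = {} inside RedshiftCreateClusterOperator.execute above: per PEP 526, annotations on local variables are never evaluated, so that particular spelling already worked on Python 3.7/3.8 even without the future import; the deferral matters for the signature annotations. A tiny, self-contained sketch (build_cluster_params is a made-up helper, not Airflow code):

    from __future__ import annotations  # still needed for the 'str | None' parameters below

    from typing import Any


    def build_cluster_params(db_name: str | None = "dev", cluster_type: str | None = None) -> dict:
        # Per PEP 526, this local annotation is never evaluated, so the
        # builtin-generic spelling is safe even without PEP 563.
        params: dict[str, Any] = {}
        if db_name:
            params["DBName"] = db_name
        if cluster_type:
            params["ClusterType"] = cluster_type
        return params


    print(build_cluster_params())  # {'DBName': 'dev'}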
diff --git a/airflow/providers/amazon/aws/operators/redshift_data.py b/airflow/providers/amazon/aws/operators/redshift_data.py
index 41d1734789..224d374ca6 100644
--- a/airflow/providers/amazon/aws/operators/redshift_data.py
+++ b/airflow/providers/amazon/aws/operators/redshift_data.py
@@ -15,8 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from time import sleep
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any
 
 from airflow.compat.functools import cached_property
 from airflow.models import BaseOperator
@@ -65,17 +67,17 @@ class RedshiftDataOperator(BaseOperator):
     def __init__(
         self,
         database: str,
-        sql: Union[str, List],
-        cluster_identifier: Optional[str] = None,
-        db_user: Optional[str] = None,
-        parameters: Optional[list] = None,
-        secret_arn: Optional[str] = None,
-        statement_name: Optional[str] = None,
+        sql: str | list,
+        cluster_identifier: str | None = None,
+        db_user: str | None = None,
+        parameters: list | None = None,
+        secret_arn: str | None = None,
+        statement_name: str | None = None,
         with_event: bool = False,
         await_result: bool = True,
         poll_interval: int = 10,
         aws_conn_id: str = 'aws_default',
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -105,7 +107,7 @@ class RedshiftDataOperator(BaseOperator):
         return RedshiftDataHook(aws_conn_id=self.aws_conn_id, region_name=self.region)
 
     def execute_query(self):
-        kwargs: Dict[str, Any] = {
+        kwargs: dict[str, Any] = {
             "ClusterIdentifier": self.cluster_identifier,
             "Database": self.database,
             "Sql": self.sql,
@@ -120,7 +122,7 @@ class RedshiftDataOperator(BaseOperator):
         return resp['Id']
 
     def execute_batch_query(self):
-        kwargs: Dict[str, Any] = {
+        kwargs: dict[str, Any] = {
             "ClusterIdentifier": self.cluster_identifier,
             "Database": self.database,
             "Sqls": self.sql,
@@ -148,7 +150,7 @@ class RedshiftDataOperator(BaseOperator):
                 self.log.info("Query %s", status)
             sleep(self.poll_interval)
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         """Execute a statement against Amazon Redshift"""
         self.log.info("Executing statement: %s", self.sql)
         if isinstance(self.sql, list):
diff --git a/airflow/providers/amazon/aws/operators/redshift_sql.py b/airflow/providers/amazon/aws/operators/redshift_sql.py
index aa324b40bc..446cff70a7 100644
--- a/airflow/providers/amazon/aws/operators/redshift_sql.py
+++ b/airflow/providers/amazon/aws/operators/redshift_sql.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable, Mapping, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
@@ -53,9 +54,9 @@ class RedshiftSQLOperator(BaseOperator):
     def __init__(
         self,
         *,
-        sql: Union[str, Iterable[str]],
+        sql: str | Iterable[str],
         redshift_conn_id: str = 'redshift_default',
-        parameters: Optional[Union[Iterable, Mapping]] = None,
+        parameters: Iterable | Mapping | None = None,
         autocommit: bool = True,
         **kwargs,
     ) -> None:
@@ -71,7 +72,7 @@ class RedshiftSQLOperator(BaseOperator):
         """
         return RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         """Execute a statement against Amazon Redshift"""
         self.log.info("Executing statement: %s", self.sql)
         hook = self.get_hook()
diff --git a/airflow/providers/amazon/aws/operators/s3.py b/airflow/providers/amazon/aws/operators/s3.py
index af45eb9a4c..698af522fd 100644
--- a/airflow/providers/amazon/aws/operators/s3.py
+++ b/airflow/providers/amazon/aws/operators/s3.py
@@ -15,13 +15,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
-
 """This module contains AWS S3 operators."""
+from __future__ import annotations
+
 import subprocess
 import sys
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -57,8 +57,8 @@ class S3CreateBucketOperator(BaseOperator):
         self,
         *,
         bucket_name: str,
-        aws_conn_id: Optional[str] = "aws_default",
-        region_name: Optional[str] = None,
+        aws_conn_id: str | None = "aws_default",
+        region_name: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -66,7 +66,7 @@ class S3CreateBucketOperator(BaseOperator):
         self.region_name = region_name
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
         if not s3_hook.check_for_bucket(self.bucket_name):
             s3_hook.create_bucket(bucket_name=self.bucket_name, region_name=self.region_name)
@@ -98,7 +98,7 @@ class S3DeleteBucketOperator(BaseOperator):
         self,
         bucket_name: str,
         force_delete: bool = False,
-        aws_conn_id: Optional[str] = "aws_default",
+        aws_conn_id: str | None = "aws_default",
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -106,7 +106,7 @@ class S3DeleteBucketOperator(BaseOperator):
         self.force_delete = force_delete
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
         if s3_hook.check_for_bucket(self.bucket_name):
             s3_hook.delete_bucket(bucket_name=self.bucket_name, force_delete=self.force_delete)
@@ -133,12 +133,12 @@ class S3GetBucketTaggingOperator(BaseOperator):
 
     template_fields: Sequence[str] = ("bucket_name",)
 
-    def __init__(self, bucket_name: str, aws_conn_id: Optional[str] = "aws_default", **kwargs) -> None:
+    def __init__(self, bucket_name: str, aws_conn_id: str | None = "aws_default", **kwargs) -> None:
         super().__init__(**kwargs)
         self.bucket_name = bucket_name
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
 
         if s3_hook.check_for_bucket(self.bucket_name):
@@ -176,10 +176,10 @@ class S3PutBucketTaggingOperator(BaseOperator):
     def __init__(
         self,
         bucket_name: str,
-        key: Optional[str] = None,
-        value: Optional[str] = None,
-        tag_set: Optional[List[Dict[str, str]]] = None,
-        aws_conn_id: Optional[str] = "aws_default",
+        key: str | None = None,
+        value: str | None = None,
+        tag_set: list[dict[str, str]] | None = None,
+        aws_conn_id: str | None = "aws_default",
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -189,7 +189,7 @@ class S3PutBucketTaggingOperator(BaseOperator):
         self.bucket_name = bucket_name
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
 
         if s3_hook.check_for_bucket(self.bucket_name):
@@ -220,12 +220,12 @@ class S3DeleteBucketTaggingOperator(BaseOperator):
 
     template_fields: Sequence[str] = ("bucket_name",)
 
-    def __init__(self, bucket_name: str, aws_conn_id: Optional[str] = "aws_default", **kwargs) -> None:
+    def __init__(self, bucket_name: str, aws_conn_id: str | None = "aws_default", **kwargs) -> None:
         super().__init__(**kwargs)
         self.bucket_name = bucket_name
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
 
         if s3_hook.check_for_bucket(self.bucket_name):
@@ -290,12 +290,12 @@ class S3CopyObjectOperator(BaseOperator):
         *,
         source_bucket_key: str,
         dest_bucket_key: str,
-        source_bucket_name: Optional[str] = None,
-        dest_bucket_name: Optional[str] = None,
-        source_version_id: Optional[str] = None,
+        source_bucket_name: str | None = None,
+        dest_bucket_name: str | None = None,
+        source_version_id: str | None = None,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
-        acl_policy: Optional[str] = None,
+        verify: str | bool | None = None,
+        acl_policy: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -309,7 +309,7 @@ class S3CopyObjectOperator(BaseOperator):
         self.verify = verify
         self.acl_policy = acl_policy
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         s3_hook.copy_object(
             self.source_bucket_key,
@@ -364,16 +364,16 @@ class S3CreateObjectOperator(BaseOperator):
     def __init__(
         self,
         *,
-        s3_bucket: Optional[str] = None,
+        s3_bucket: str | None = None,
         s3_key: str,
-        data: Union[str, bytes],
+        data: str | bytes,
         replace: bool = False,
         encrypt: bool = False,
-        acl_policy: Optional[str] = None,
-        encoding: Optional[str] = None,
-        compression: Optional[str] = None,
+        acl_policy: str | None = None,
+        encoding: str | None = None,
+        compression: str | None = None,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
+        verify: str | bool | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -389,7 +389,7 @@ class S3CreateObjectOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.verify = verify
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
 
         s3_bucket, s3_key = s3_hook.get_s3_bucket_key(self.s3_bucket, self.s3_key, 'dest_bucket', 'dest_key')
@@ -449,10 +449,10 @@ class S3DeleteObjectsOperator(BaseOperator):
         self,
         *,
         bucket: str,
-        keys: Optional[Union[str, list]] = None,
-        prefix: Optional[str] = None,
+        keys: str | list | None = None,
+        prefix: str | None = None,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
+        verify: str | bool | None = None,
         **kwargs,
     ):
 
@@ -466,7 +466,7 @@ class S3DeleteObjectsOperator(BaseOperator):
         if not bool(keys is None) ^ bool(prefix is None):
             raise AirflowException("Either keys or prefix should be set.")
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         if not bool(self.keys is None) ^ bool(self.prefix is None):
             raise AirflowException("Either keys or prefix should be set.")
 
@@ -533,13 +533,13 @@ class S3FileTransformOperator(BaseOperator):
         *,
         source_s3_key: str,
         dest_s3_key: str,
-        transform_script: Optional[str] = None,
+        transform_script: str | None = None,
         select_expression=None,
-        script_args: Optional[Sequence[str]] = None,
+        script_args: Sequence[str] | None = None,
         source_aws_conn_id: str = 'aws_default',
-        source_verify: Optional[Union[bool, str]] = None,
+        source_verify: bool | str | None = None,
         dest_aws_conn_id: str = 'aws_default',
-        dest_verify: Optional[Union[bool, str]] = None,
+        dest_verify: bool | str | None = None,
         replace: bool = False,
         **kwargs,
     ) -> None:
@@ -557,7 +557,7 @@ class S3FileTransformOperator(BaseOperator):
         self.script_args = script_args or []
         self.output_encoding = sys.getdefaultencoding()
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         if self.transform_script is None and self.select_expression is None:
             raise AirflowException("Either transform_script or select_expression must be specified")
 
@@ -662,7 +662,7 @@ class S3ListOperator(BaseOperator):
         prefix: str = '',
         delimiter: str = '',
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
+        verify: str | bool | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -672,7 +672,7 @@ class S3ListOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.verify = verify
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
 
         self.log.info(
@@ -736,7 +736,7 @@ class S3ListPrefixesOperator(BaseOperator):
         prefix: str,
         delimiter: str,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
+        verify: str | bool | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -746,7 +746,7 @@ class S3ListPrefixesOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.verify = verify
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
 
         self.log.info(
diff --git a/airflow/providers/amazon/aws/operators/sagemaker.py b/airflow/providers/amazon/aws/operators/sagemaker.py
index d82dfa5f29..a85c0ca5ba 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Any, Sequence
 
 from botocore.exceptions import ClientError
 
@@ -34,7 +35,7 @@ DEFAULT_CONN_ID: str = 'aws_default'
 CHECK_INTERVAL_SECOND: int = 30
 
 
-def serialize(result: Dict) -> str:
+def serialize(result: dict) -> str:
     return json.loads(json.dumps(result, cls=AirflowJsonEncoder))
 
 
@@ -46,15 +47,15 @@ class SageMakerBaseOperator(BaseOperator):
 
     template_fields: Sequence[str] = ('config',)
     template_ext: Sequence[str] = ()
-    template_fields_renderers: Dict = {'config': 'json'}
+    template_fields_renderers: dict = {'config': 'json'}
     ui_color: str = '#ededed'
-    integer_fields: List[List[Any]] = []
+    integer_fields: list[list[Any]] = []
 
-    def __init__(self, *, config: Dict, **kwargs):
+    def __init__(self, *, config: dict, **kwargs):
         super().__init__(**kwargs)
         self.config = config
 
-    def parse_integer(self, config: Dict, field: Union[List[str], str]) -> None:
+    def parse_integer(self, config: dict, field: list[str] | str) -> None:
         """Recursive method for parsing string fields holding integer values to integers."""
         if len(field) == 1:
             if isinstance(config, list):
@@ -101,7 +102,7 @@ class SageMakerBaseOperator(BaseOperator):
         """
         self.integer_fields = []
 
-    def execute(self, context: 'Context') -> Union[None, Dict]:
+    def execute(self, context: Context) -> None | dict:
         raise NotImplementedError('Please implement execute() in sub class!')
 
     @cached_property
@@ -140,12 +141,12 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
     def __init__(
         self,
         *,
-        config: Dict,
+        config: dict,
         aws_conn_id: str = DEFAULT_CONN_ID,
         wait_for_completion: bool = True,
         print_log: bool = True,
         check_interval: int = CHECK_INTERVAL_SECOND,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
         action_if_job_exists: str = 'increment',
         **kwargs,
     ):
@@ -164,7 +165,7 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
 
     def _create_integer_fields(self) -> None:
         """Set fields which should be cast to integers."""
-        self.integer_fields: List[Union[List[str], List[List[str]]]] = [
+        self.integer_fields: list[list[str] | list[list[str]]] = [
             ['ProcessingResources', 'ClusterConfig', 'InstanceCount'],
             ['ProcessingResources', 'ClusterConfig', 'VolumeSizeInGB'],
         ]
@@ -177,7 +178,7 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
             hook = AwsBaseHook(self.aws_conn_id, client_type='iam')
             self.config['RoleArn'] = hook.expand_role(self.config['RoleArn'])
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         processing_job_name = self.config['ProcessingJobName']
         if self.hook.find_processing_job_by_name(processing_job_name):
@@ -217,7 +218,7 @@ class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
     def __init__(
         self,
         *,
-        config: Dict,
+        config: dict,
         aws_conn_id: str = DEFAULT_CONN_ID,
         **kwargs,
     ):
@@ -227,9 +228,9 @@ class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
 
     def _create_integer_fields(self) -> None:
         """Set fields which should be cast to integers."""
-        self.integer_fields: List[List[str]] = [['ProductionVariants', 'InitialInstanceCount']]
+        self.integer_fields: list[list[str]] = [['ProductionVariants', 'InitialInstanceCount']]
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         self.log.info('Creating SageMaker Endpoint Config %s.', self.config['EndpointConfigName'])
         response = self.hook.create_endpoint_config(self.config)
@@ -290,11 +291,11 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
     def __init__(
         self,
         *,
-        config: Dict,
+        config: dict,
         aws_conn_id: str = DEFAULT_CONN_ID,
         wait_for_completion: bool = True,
         check_interval: int = CHECK_INTERVAL_SECOND,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
         operation: str = 'create',
         **kwargs,
     ):
@@ -311,7 +312,7 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
     def _create_integer_fields(self) -> None:
         """Set fields which should be cast to integers."""
         if 'EndpointConfig' in self.config:
-            self.integer_fields: List[List[str]] = [
+            self.integer_fields: list[list[str]] = [
                 ['EndpointConfig', 'ProductionVariants', 'InitialInstanceCount']
             ]
 
@@ -324,7 +325,7 @@ class SageMakerEndpointOperator(SageMakerBaseOperator):
         if 'ExecutionRoleArn' in config:
             config['ExecutionRoleArn'] = hook.expand_role(config['ExecutionRoleArn'])
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         model_info = self.config.get('Model')
         endpoint_config_info = self.config.get('EndpointConfig')
@@ -418,11 +419,11 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
     def __init__(
         self,
         *,
-        config: Dict,
+        config: dict,
         aws_conn_id: str = DEFAULT_CONN_ID,
         wait_for_completion: bool = True,
         check_interval: int = CHECK_INTERVAL_SECOND,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
         check_if_job_exists: bool = True,
         action_if_job_exists: str = 'increment',
         **kwargs,
@@ -444,7 +445,7 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
 
     def _create_integer_fields(self) -> None:
         """Set fields which should be cast to integers."""
-        self.integer_fields: List[List[str]] = [
+        self.integer_fields: list[list[str]] = [
             ['Transform', 'TransformResources', 'InstanceCount'],
             ['Transform', 'MaxConcurrentTransforms'],
             ['Transform', 'MaxPayloadInMB'],
@@ -462,7 +463,7 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
             hook = AwsBaseHook(self.aws_conn_id, client_type='iam')
             config['ExecutionRoleArn'] = hook.expand_role(config['ExecutionRoleArn'])
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         model_config = self.config.get('Model')
         transform_config = self.config.get('Transform', self.config)
@@ -533,11 +534,11 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
     def __init__(
         self,
         *,
-        config: Dict,
+        config: dict,
         aws_conn_id: str = DEFAULT_CONN_ID,
         wait_for_completion: bool = True,
         check_interval: int = CHECK_INTERVAL_SECOND,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
         **kwargs,
     ):
         super().__init__(config=config, **kwargs)
@@ -557,7 +558,7 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
 
     def _create_integer_fields(self) -> None:
         """Set fields which should be cast to integers."""
-        self.integer_fields: List[List[str]] = [
+        self.integer_fields: list[list[str]] = [
             ['HyperParameterTuningJobConfig', 'ResourceLimits', 'MaxNumberOfTrainingJobs'],
             ['HyperParameterTuningJobConfig', 'ResourceLimits', 'MaxParallelTrainingJobs'],
             ['TrainingJobDefinition', 'ResourceConfig', 'InstanceCount'],
@@ -565,7 +566,7 @@ class SageMakerTuningOperator(SageMakerBaseOperator):
             ['TrainingJobDefinition', 'StoppingCondition', 'MaxRuntimeInSeconds'],
         ]
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         self.log.info(
             'Creating SageMaker Hyper-Parameter Tuning Job %s', self.config['HyperParameterTuningJobName']
@@ -602,7 +603,7 @@ class SageMakerModelOperator(SageMakerBaseOperator):
     :return Dict: Returns The ARN of the model created in Amazon SageMaker.
     """
 
-    def __init__(self, *, config: Dict, aws_conn_id: str = DEFAULT_CONN_ID, **kwargs):
+    def __init__(self, *, config: dict, aws_conn_id: str = DEFAULT_CONN_ID, **kwargs):
         super().__init__(config=config, **kwargs)
         self.config = config
         self.aws_conn_id = aws_conn_id
@@ -613,7 +614,7 @@ class SageMakerModelOperator(SageMakerBaseOperator):
             hook = AwsBaseHook(self.aws_conn_id, client_type='iam')
             self.config['ExecutionRoleArn'] = hook.expand_role(self.config['ExecutionRoleArn'])
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         self.log.info('Creating SageMaker Model %s.', self.config['ModelName'])
         response = self.hook.create_model(self.config)
@@ -655,12 +656,12 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
     def __init__(
         self,
         *,
-        config: Dict,
+        config: dict,
         aws_conn_id: str = DEFAULT_CONN_ID,
         wait_for_completion: bool = True,
         print_log: bool = True,
         check_interval: int = CHECK_INTERVAL_SECOND,
-        max_ingestion_time: Optional[int] = None,
+        max_ingestion_time: int | None = None,
         check_if_job_exists: bool = True,
         action_if_job_exists: str = 'increment',
         **kwargs,
@@ -688,13 +689,13 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
 
     def _create_integer_fields(self) -> None:
         """Set fields which should be cast to integers."""
-        self.integer_fields: List[List[str]] = [
+        self.integer_fields: list[list[str]] = [
             ['ResourceConfig', 'InstanceCount'],
             ['ResourceConfig', 'VolumeSizeInGB'],
             ['StoppingCondition', 'MaxRuntimeInSeconds'],
         ]
 
-    def execute(self, context: 'Context') -> Dict:
+    def execute(self, context: Context) -> dict:
         self.preprocess_config()
         if self.check_if_job_exists:
             self._check_if_job_exists()
@@ -739,12 +740,12 @@ class SageMakerDeleteModelOperator(SageMakerBaseOperator):
     :param aws_conn_id: The AWS connection ID to use.
     """
 
-    def __init__(self, *, config: Dict, aws_conn_id: str = DEFAULT_CONN_ID, **kwargs):
+    def __init__(self, *, config: dict, aws_conn_id: str = DEFAULT_CONN_ID, **kwargs):
         super().__init__(config=config, **kwargs)
         self.config = config
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context') -> Any:
+    def execute(self, context: Context) -> Any:
         sagemaker_hook = SageMakerHook(aws_conn_id=self.aws_conn_id)
         sagemaker_hook.delete_model(model_name=self.config['ModelName'])
         self.log.info("Model %s deleted successfully.", self.config['ModelName'])
diff --git a/airflow/providers/amazon/aws/operators/sns.py b/airflow/providers/amazon/aws/operators/sns.py
index d9dd3c4531..8f64211512 100644
--- a/airflow/providers/amazon/aws/operators/sns.py
+++ b/airflow/providers/amazon/aws/operators/sns.py
@@ -15,9 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """Publish message to SNS queue"""
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.sns import SnsHook
@@ -51,8 +52,8 @@ class SnsPublishOperator(BaseOperator):
         *,
         target_arn: str,
         message: str,
-        subject: Optional[str] = None,
-        message_attributes: Optional[dict] = None,
+        subject: str | None = None,
+        message_attributes: dict | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -63,7 +64,7 @@ class SnsPublishOperator(BaseOperator):
         self.message_attributes = message_attributes
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         sns = SnsHook(aws_conn_id=self.aws_conn_id)
 
         self.log.info(
diff --git a/airflow/providers/amazon/aws/operators/sqs.py b/airflow/providers/amazon/aws/operators/sqs.py
index 57b8a455d7..f2fe56299c 100644
--- a/airflow/providers/amazon/aws/operators/sqs.py
+++ b/airflow/providers/amazon/aws/operators/sqs.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """Publish message to SQS queue"""
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
@@ -58,9 +59,9 @@ class SqsPublishOperator(BaseOperator):
         *,
         sqs_queue: str,
         message_content: str,
-        message_attributes: Optional[dict] = None,
+        message_attributes: dict | None = None,
         delay_seconds: int = 0,
-        message_group_id: Optional[str] = None,
+        message_group_id: str | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ):
@@ -72,7 +73,7 @@ class SqsPublishOperator(BaseOperator):
         self.message_attributes = message_attributes or {}
         self.message_group_id = message_group_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """
         Publish the message to the Amazon SQS queue
 
diff --git a/airflow/providers/amazon/aws/operators/step_function.py b/airflow/providers/amazon/aws/operators/step_function.py
index 7c32b33890..7c1ca63b48 100644
--- a/airflow/providers/amazon/aws/operators/step_function.py
+++ b/airflow/providers/amazon/aws/operators/step_function.py
@@ -14,10 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
+from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -52,10 +52,10 @@ class StepFunctionStartExecutionOperator(BaseOperator):
         self,
         *,
         state_machine_arn: str,
-        name: Optional[str] = None,
-        state_machine_input: Union[dict, str, None] = None,
+        name: str | None = None,
+        state_machine_input: dict | str | None = None,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -65,7 +65,7 @@ class StepFunctionStartExecutionOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         hook = StepFunctionHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
         execution_arn = hook.start_execution(self.state_machine_arn, self.name, self.input)
@@ -101,7 +101,7 @@ class StepFunctionGetExecutionOutputOperator(BaseOperator):
         *,
         execution_arn: str,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -109,7 +109,7 @@ class StepFunctionGetExecutionOutputOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         hook = StepFunctionHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
 
         execution_status = hook.describe_execution(self.execution_arn)
diff --git a/airflow/providers/amazon/aws/secrets/secrets_manager.py b/airflow/providers/amazon/aws/secrets/secrets_manager.py
index f333ee5584..99192504b8 100644
--- a/airflow/providers/amazon/aws/secrets/secrets_manager.py
+++ b/airflow/providers/amazon/aws/secrets/secrets_manager.py
@@ -16,11 +16,12 @@
 # specific language governing permissions and limitations
 # under the License.
 """Objects relating to sourcing secrets from AWS Secrets Manager"""
+from __future__ import annotations
 
 import ast
 import json
 import warnings
-from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from typing import TYPE_CHECKING, Any
 from urllib.parse import unquote, urlencode
 
 from airflow.compat.functools import cached_property
@@ -115,8 +116,8 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
         config_prefix: str = 'airflow/config',
         sep: str = "/",
         full_url_mode: bool = True,
-        are_secret_values_urlencoded: Optional[bool] = None,
-        extra_conn_words: Optional[Dict[str, List[str]]] = None,
+        are_secret_values_urlencoded: bool | None = None,
+        extra_conn_words: dict[str, list[str]] | None = None,
         **kwargs,
     ):
         super().__init__()
@@ -196,7 +197,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return conn_string
 
-    def get_connection(self, conn_id: str) -> Optional["Connection"]:
+    def get_connection(self, conn_id: str) -> Connection | None:
         if not self.full_url_mode:
             # Avoid circular import problems when instantiating the backend during configuration.
             from airflow.models.connection import Connection
@@ -215,7 +216,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
             if self.are_secret_values_urlencoded:
                 data = self._remove_escaping_in_secret_dict(secret=data, conn_id=conn_id)
 
-            port: Optional[int] = None
+            port: int | None = None
 
             if data['port'] is not None:
                 port = int(data['port'])
@@ -233,7 +234,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return super().get_connection(conn_id=conn_id)
 
-    def _standardize_secret_keys(self, secret: Dict[str, Any]) -> Dict[str, Any]:
+    def _standardize_secret_keys(self, secret: dict[str, Any]) -> dict[str, Any]:
         """Standardize the names of the keys in the dict. These keys align with"""
         possible_words_for_conn_fields = {
             'user': ['user', 'username', 'login', 'user_name'],
@@ -248,7 +249,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
         for conn_field, extra_words in self.extra_conn_words.items():
             possible_words_for_conn_fields[conn_field].extend(extra_words)
 
-        conn_d: Dict[str, Any] = {}
+        conn_d: dict[str, Any] = {}
         for conn_field, possible_words in possible_words_for_conn_fields.items():
             try:
                 conn_d[conn_field] = [v for k, v in secret.items() if k in possible_words][0]
@@ -257,19 +258,19 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return conn_d
 
-    def get_uri_from_secret(self, secret: Dict[str, str]) -> str:
-        conn_d: Dict[str, str] = {k: v if v else '' for k, v in self._standardize_secret_keys(secret).items()}
+    def get_uri_from_secret(self, secret: dict[str, str]) -> str:
+        conn_d: dict[str, str] = {k: v if v else '' for k, v in self._standardize_secret_keys(secret).items()}
         conn_string = "{conn_type}://{user}:{password}@{host}:{port}/{schema}".format(**conn_d)
         return self._format_uri_with_extra(secret, conn_string)
 
-    def _deserialize_json_string(self, value: Optional[str]) -> Optional[Dict[Any, Any]]:
+    def _deserialize_json_string(self, value: str | None) -> dict[Any, Any] | None:
         if not value:
             return None
         try:
             # Use ast.literal_eval for backwards compatibility.
             # Previous version of this code had a comment saying that using json.loads caused errors.
             # This likely means people were using dict reprs instead of valid JSONs.
-            res: Dict[str, Any] = json.loads(value)
+            res: dict[str, Any] = json.loads(value)
         except json.JSONDecodeError:
             try:
                 res = ast.literal_eval(value) if value else None
@@ -282,7 +283,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return res
 
-    def _remove_escaping_in_secret_dict(self, secret: Dict[str, Any], conn_id: str) -> Dict[str, Any]:
+    def _remove_escaping_in_secret_dict(self, secret: dict[str, Any], conn_id: str) -> dict[str, Any]:
         # When ``unquote(v) == v``, then removing unquote won't affect the user, regardless of
         # whether or not ``v`` is URL-encoded. For example, "foo bar" is not URL-encoded. But
         # because decoding it doesn't affect the value, then it will migrate safely when
@@ -350,7 +351,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return secret
 
-    def get_conn_value(self, conn_id: str) -> Optional[str]:
+    def get_conn_value(self, conn_id: str) -> str | None:
         """
         Get serialized representation of Connection
 
@@ -392,7 +393,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return connection
 
-    def get_conn_uri(self, conn_id: str) -> Optional[str]:
+    def get_conn_uri(self, conn_id: str) -> str | None:
         """
         Return URI representation of Connection conn_id.
 
@@ -410,7 +411,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
             )
         return self.get_conn_value(conn_id)
 
-    def get_variable(self, key: str) -> Optional[str]:
+    def get_variable(self, key: str) -> str | None:
         """
         Get Airflow Variable from Environment Variable
         :param key: Variable Key
@@ -421,7 +422,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return self._get_secret(self.variables_prefix, key)
 
-    def get_config(self, key: str) -> Optional[str]:
+    def get_config(self, key: str) -> str | None:
         """
         Get Airflow Configuration
         :param key: Configuration Option Key
@@ -432,7 +433,7 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
 
         return self._get_secret(self.config_prefix, key)
 
-    def _get_secret(self, path_prefix, secret_id: str) -> Optional[str]:
+    def _get_secret(self, path_prefix, secret_id: str) -> str | None:
         """
         Get secret value from Secrets Manager
         :param path_prefix: Prefix for the Path to get Secret
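
As the comment in the secrets_manager.py hunk notes, _deserialize_json_string falls back to ast.literal_eval so that secrets stored as Python dict reprs keep working alongside valid JSON. A simplified, standalone sketch of that fallback (the function name deserialize is hypothetical; only the behaviour visible in the hunk is reproduced):

    from __future__ import annotations

    import ast
    import json
    from typing import Any


    def deserialize(value: str | None) -> dict[Any, Any] | None:
        if not value:
            return None
        try:
            # Valid JSON, e.g. '{"host": "example.com"}'
            return json.loads(value)
        except json.JSONDecodeError:
            # Dict reprs such as "{'host': 'example.com'}" are not valid JSON,
            # but ast.literal_eval accepts them, keeping older secrets readable.
            return ast.literal_eval(value)


    print(deserialize('{"host": "example.com"}'))   # {'host': 'example.com'}
    print(deserialize("{'host': 'example.com'}"))   # {'host': 'example.com'}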
diff --git a/airflow/providers/amazon/aws/secrets/systems_manager.py b/airflow/providers/amazon/aws/secrets/systems_manager.py
index fe47cbb543..0e78aa5772 100644
--- a/airflow/providers/amazon/aws/secrets/systems_manager.py
+++ b/airflow/providers/amazon/aws/secrets/systems_manager.py
@@ -16,9 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 """Objects relating to sourcing connections from AWS SSM Parameter Store"""
+from __future__ import annotations
 
 import warnings
-from typing import Optional
 
 from airflow.compat.functools import cached_property
 from airflow.providers.amazon.aws.utils import get_airflow_version, trim_none_values
@@ -113,7 +113,7 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
         session = SessionFactory(conn=conn_config).create_session()
         return session.client(service_name="ssm", **client_kwargs)
 
-    def get_conn_value(self, conn_id: str) -> Optional[str]:
+    def get_conn_value(self, conn_id: str) -> str | None:
         """
         Get param value
 
@@ -124,7 +124,7 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
 
         return self._get_secret(self.connections_prefix, conn_id)
 
-    def get_conn_uri(self, conn_id: str) -> Optional[str]:
+    def get_conn_uri(self, conn_id: str) -> str | None:
         """
         Return URI representation of Connection conn_id.
 
@@ -142,7 +142,7 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
             )
         return self.get_conn_value(conn_id)
 
-    def get_variable(self, key: str) -> Optional[str]:
+    def get_variable(self, key: str) -> str | None:
         """
         Get Airflow Variable from Environment Variable
 
@@ -154,7 +154,7 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
 
         return self._get_secret(self.variables_prefix, key)
 
-    def get_config(self, key: str) -> Optional[str]:
+    def get_config(self, key: str) -> str | None:
         """
         Get Airflow Configuration
 
@@ -166,7 +166,7 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
 
         return self._get_secret(self.config_prefix, key)
 
-    def _get_secret(self, path_prefix: str, secret_id: str) -> Optional[str]:
+    def _get_secret(self, path_prefix: str, secret_id: str) -> str | None:
         """
         Get secret value from Parameter Store.
 
diff --git a/airflow/providers/amazon/aws/sensors/athena.py b/airflow/providers/amazon/aws/sensors/athena.py
index 4269ca01dd..60859ac38b 100644
--- a/airflow/providers/amazon/aws/sensors/athena.py
+++ b/airflow/providers/amazon/aws/sensors/athena.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Any, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -62,7 +64,7 @@ class AthenaSensor(BaseSensorOperator):
         self,
         *,
         query_execution_id: str,
-        max_retries: Optional[int] = None,
+        max_retries: int | None = None,
         aws_conn_id: str = 'aws_default',
         sleep_time: int = 10,
         **kwargs: Any,
@@ -73,7 +75,7 @@ class AthenaSensor(BaseSensorOperator):
         self.sleep_time = sleep_time
         self.max_retries = max_retries
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         state = self.hook.poll_query_status(self.query_execution_id, self.max_retries)
 
         if state in self.FAILURE_STATES:
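
The same future import is what lets the sensors drop the quotes around forward references such as Context: the class is imported only under "if TYPE_CHECKING:", and because annotations are no longer evaluated at runtime the bare name does not raise NameError. A short sketch of that combination (the sensor class here is made up):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Resolved only by static type checkers; never imported at runtime.
        from airflow.utils.context import Context

    class ExampleSensor:
        # Hypothetical sensor; without the future import the unquoted
        # Context annotation would raise NameError at class definition.
        def poke(self, context: Context) -> bool:
            return True
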
diff --git a/airflow/providers/amazon/aws/sensors/batch.py b/airflow/providers/amazon/aws/sensors/batch.py
index 3ad288c904..2d2bb0b90d 100644
--- a/airflow/providers/amazon/aws/sensors/batch.py
+++ b/airflow/providers/amazon/aws/sensors/batch.py
@@ -14,14 +14,16 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import sys
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 if sys.version_info >= (3, 8):
     from functools import cached_property
 else:
     from cached_property import cached_property
+
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
 from airflow.sensors.base import BaseSensorOperator
@@ -53,16 +55,16 @@ class BatchSensor(BaseSensorOperator):
         *,
         job_id: str,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.job_id = job_id
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
-        self.hook: Optional[BatchClientHook] = None
+        self.hook: BatchClientHook | None = None
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         job_description = self.get_hook().get_job_description(self.job_id)
         state = job_description['status']
 
@@ -113,7 +115,7 @@ class BatchComputeEnvironmentSensor(BaseSensorOperator):
         self,
         compute_environment: str,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -129,7 +131,7 @@ class BatchComputeEnvironmentSensor(BaseSensorOperator):
             region_name=self.region_name,
         )
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         response = self.hook.client.describe_compute_environments(
             computeEnvironments=[self.compute_environment]
         )
@@ -178,7 +180,7 @@ class BatchJobQueueSensor(BaseSensorOperator):
         job_queue: str,
         treat_non_existing_as_deleted: bool = False,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -195,7 +197,7 @@ class BatchJobQueueSensor(BaseSensorOperator):
             region_name=self.region_name,
         )
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         response = self.hook.client.describe_job_queues(jobQueues=[self.job_queue])
 
         if len(response['jobQueues']) == 0:
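
The version-gated import kept at the top of this hunk is a small compatibility shim: functools.cached_property only exists on Python 3.8+, so older interpreters fall back to the third-party backport of the same name. A sketch of how that shim is typically used, assuming the cached-property package is available on Python 3.7 (the Circle class is made up):

    import sys

    # Prefer the standard-library implementation where it exists.
    if sys.version_info >= (3, 8):
        from functools import cached_property
    else:
        from cached_property import cached_property

    class Circle:
        # Hypothetical class: the property body runs once per instance and
        # the computed value is cached on the instance afterwards.
        def __init__(self, radius: float) -> None:
            self.radius = radius

        @cached_property
        def area(self) -> float:
            return 3.141592653589793 * self.radius ** 2
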
diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/airflow/providers/amazon/aws/sensors/cloud_formation.py
index 290da5ad73..75fb480c83 100644
--- a/airflow/providers/amazon/aws/sensors/cloud_formation.py
+++ b/airflow/providers/amazon/aws/sensors/cloud_formation.py
@@ -16,7 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains sensors for AWS CloudFormation."""
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -49,7 +51,7 @@ class CloudFormationCreateStackSensor(BaseSensorOperator):
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         stack_status = self.hook.get_stack_status(self.stack_name)
         if stack_status == 'CREATE_COMPLETE':
             return True
@@ -85,7 +87,7 @@ class CloudFormationDeleteStackSensor(BaseSensorOperator):
         *,
         stack_name: str,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -93,7 +95,7 @@ class CloudFormationDeleteStackSensor(BaseSensorOperator):
         self.region_name = region_name
         self.stack_name = stack_name
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         stack_status = self.hook.get_stack_status(self.stack_name)
         if stack_status in ('DELETE_COMPLETE', None):
             return True
diff --git a/airflow/providers/amazon/aws/sensors/dms.py b/airflow/providers/amazon/aws/sensors/dms.py
index 0437ee4d98..876032a9d4 100644
--- a/airflow/providers/amazon/aws/sensors/dms.py
+++ b/airflow/providers/amazon/aws/sensors/dms.py
@@ -15,8 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable, Optional, Sequence
+from typing import TYPE_CHECKING, Iterable, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.dms import DmsHook
@@ -47,8 +48,8 @@ class DmsTaskBaseSensor(BaseSensorOperator):
         self,
         replication_task_arn: str,
         aws_conn_id='aws_default',
-        target_statuses: Optional[Iterable[str]] = None,
-        termination_statuses: Optional[Iterable[str]] = None,
+        target_statuses: Iterable[str] | None = None,
+        termination_statuses: Iterable[str] | None = None,
         *args,
         **kwargs,
     ):
@@ -57,7 +58,7 @@ class DmsTaskBaseSensor(BaseSensorOperator):
         self.replication_task_arn = replication_task_arn
         self.target_statuses: Iterable[str] = target_statuses or []
         self.termination_statuses: Iterable[str] = termination_statuses or []
-        self.hook: Optional[DmsHook] = None
+        self.hook: DmsHook | None = None
 
     def get_hook(self) -> DmsHook:
         """Get DmsHook"""
@@ -67,8 +68,8 @@ class DmsTaskBaseSensor(BaseSensorOperator):
         self.hook = DmsHook(self.aws_conn_id)
         return self.hook
 
-    def poke(self, context: 'Context'):
-        status: Optional[str] = self.get_hook().get_task_status(self.replication_task_arn)
+    def poke(self, context: Context):
+        status: str | None = self.get_hook().get_task_status(self.replication_task_arn)
 
         if not status:
             raise AirflowException(
diff --git a/airflow/providers/amazon/aws/sensors/ec2.py b/airflow/providers/amazon/aws/sensors/ec2.py
index 7d4a640593..bfdb8fcd41 100644
--- a/airflow/providers/amazon/aws/sensors/ec2.py
+++ b/airflow/providers/amazon/aws/sensors/ec2.py
@@ -15,9 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
 from airflow.sensors.base import BaseSensorOperator
@@ -51,7 +51,7 @@ class EC2InstanceStateSensor(BaseSensorOperator):
         target_state: str,
         instance_id: str,
         aws_conn_id: str = "aws_default",
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         if target_state not in self.valid_states:
@@ -62,7 +62,7 @@ class EC2InstanceStateSensor(BaseSensorOperator):
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         ec2_hook = EC2Hook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
         instance_state = ec2_hook.get_instance_state(instance_id=self.instance_id)
         self.log.info("instance state: %s", instance_state)
diff --git a/airflow/providers/amazon/aws/sensors/ecs.py b/airflow/providers/amazon/aws/sensors/ecs.py
index a048f48378..be695b0b6c 100644
--- a/airflow/providers/amazon/aws/sensors/ecs.py
+++ b/airflow/providers/amazon/aws/sensors/ecs.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
-from typing import TYPE_CHECKING, Optional, Sequence, Set
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 import boto3
 
@@ -45,9 +46,7 @@ def _check_failed(current_state, target_state, failure_states):
 class EcsBaseSensor(BaseSensorOperator):
     """Contains general sensor behavior for Elastic Container Service."""
 
-    def __init__(
-        self, *, aws_conn_id: Optional[str] = DEFAULT_CONN_ID, region: Optional[str] = None, **kwargs
-    ):
+    def __init__(self, *, aws_conn_id: str | None = DEFAULT_CONN_ID, region: str | None = None, **kwargs):
         self.aws_conn_id = aws_conn_id
         self.region = region
         super().__init__(**kwargs)
@@ -84,8 +83,8 @@ class EcsClusterStateSensor(EcsBaseSensor):
         self,
         *,
         cluster_name: str,
-        target_state: Optional[EcsClusterStates] = EcsClusterStates.ACTIVE,
-        failure_states: Optional[Set[EcsClusterStates]] = None,
+        target_state: EcsClusterStates | None = EcsClusterStates.ACTIVE,
+        failure_states: set[EcsClusterStates] | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -93,7 +92,7 @@ class EcsClusterStateSensor(EcsBaseSensor):
         self.target_state = target_state
         self.failure_states = failure_states or {EcsClusterStates.FAILED, EcsClusterStates.INACTIVE}
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         cluster_state = EcsClusterStates(self.hook.get_cluster_state(cluster_name=self.cluster_name))
 
         self.log.info("Cluster state: %s, waiting for: %s", cluster_state, self.target_state)
@@ -123,7 +122,7 @@ class EcsTaskDefinitionStateSensor(EcsBaseSensor):
         self,
         *,
         task_definition: str,
-        target_state: Optional[EcsTaskDefinitionStates] = EcsTaskDefinitionStates.ACTIVE,
+        target_state: EcsTaskDefinitionStates | None = EcsTaskDefinitionStates.ACTIVE,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -138,7 +137,7 @@ class EcsTaskDefinitionStateSensor(EcsBaseSensor):
             )
         }
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         task_definition_state = EcsTaskDefinitionStates(
             self.hook.get_task_definition_state(task_definition=self.task_definition)
         )
@@ -171,8 +170,8 @@ class EcsTaskStateSensor(EcsBaseSensor):
         *,
         cluster: str,
         task: str,
-        target_state: Optional[EcsTaskStates] = EcsTaskStates.RUNNING,
-        failure_states: Optional[Set[EcsTaskStates]] = None,
+        target_state: EcsTaskStates | None = EcsTaskStates.RUNNING,
+        failure_states: set[EcsTaskStates] | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -181,7 +180,7 @@ class EcsTaskStateSensor(EcsBaseSensor):
         self.target_state = target_state
         self.failure_states = failure_states or {EcsTaskStates.STOPPED}
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         task_state = EcsTaskStates(self.hook.get_task_state(cluster=self.cluster, task=self.task))
 
         self.log.info("Task state: %s, waiting for: %s", task_state, self.target_state)
diff --git a/airflow/providers/amazon/aws/sensors/eks.py b/airflow/providers/amazon/aws/sensors/eks.py
index 6c3ab7cdf9..f2ee372151 100644
--- a/airflow/providers/amazon/aws/sensors/eks.py
+++ b/airflow/providers/amazon/aws/sensors/eks.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
 """Tracking the state of Amazon EKS Clusters, Amazon EKS managed node groups, and AWS Fargate profiles."""
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.eks import (
@@ -84,7 +85,7 @@ class EksClusterStateSensor(BaseSensorOperator):
         cluster_name: str,
         target_state: ClusterStates = ClusterStates.ACTIVE,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ):
         self.cluster_name = cluster_name
@@ -97,7 +98,7 @@ class EksClusterStateSensor(BaseSensorOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
@@ -152,7 +153,7 @@ class EksFargateProfileStateSensor(BaseSensorOperator):
         fargate_profile_name: str,
         target_state: FargateProfileStates = FargateProfileStates.ACTIVE,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ):
         self.cluster_name = cluster_name
@@ -166,7 +167,7 @@ class EksFargateProfileStateSensor(BaseSensorOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
@@ -223,7 +224,7 @@ class EksNodegroupStateSensor(BaseSensorOperator):
         nodegroup_name: str,
         target_state: NodegroupStates = NodegroupStates.ACTIVE,
         aws_conn_id: str = DEFAULT_CONN_ID,
-        region: Optional[str] = None,
+        region: str | None = None,
         **kwargs,
     ):
         self.cluster_name = cluster_name
@@ -237,7 +238,7 @@ class EksNodegroupStateSensor(BaseSensorOperator):
         self.region = region
         super().__init__(**kwargs)
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         eks_hook = EksHook(
             aws_conn_id=self.aws_conn_id,
             region_name=self.region,
diff --git a/airflow/providers/amazon/aws/sensors/emr.py b/airflow/providers/amazon/aws/sensors/emr.py
index b6c785f87f..0b1c5c686b 100644
--- a/airflow/providers/amazon/aws/sensors/emr.py
+++ b/airflow/providers/amazon/aws/sensors/emr.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Any, Dict, FrozenSet, Iterable, Optional, Sequence, Set, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Iterable, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.emr import EmrContainerHook, EmrHook, EmrServerlessHook
@@ -48,7 +50,7 @@ class EmrBaseSensor(BaseSensorOperator):
         self.aws_conn_id = aws_conn_id
         self.target_states: Iterable[str] = []  # will be set in subclasses
         self.failed_states: Iterable[str] = []  # will be set in subclasses
-        self.hook: Optional[EmrHook] = None
+        self.hook: EmrHook | None = None
 
     def get_hook(self) -> EmrHook:
         """Get EmrHook"""
@@ -58,7 +60,7 @@ class EmrBaseSensor(BaseSensorOperator):
         self.hook = EmrHook(aws_conn_id=self.aws_conn_id)
         return self.hook
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         response = self.get_emr_response()
 
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
@@ -80,7 +82,7 @@ class EmrBaseSensor(BaseSensorOperator):
 
         return False
 
-    def get_emr_response(self) -> Dict[str, Any]:
+    def get_emr_response(self) -> dict[str, Any]:
         """
         Make an API call with boto3 and get response.
 
@@ -90,7 +92,7 @@ class EmrBaseSensor(BaseSensorOperator):
         raise NotImplementedError('Please implement get_emr_response() in subclass')
 
     @staticmethod
-    def state_from_response(response: Dict[str, Any]) -> str:
+    def state_from_response(response: dict[str, Any]) -> str:
         """
         Get state from response dictionary.
 
@@ -101,7 +103,7 @@ class EmrBaseSensor(BaseSensorOperator):
         raise NotImplementedError('Please implement state_from_response() in subclass')
 
     @staticmethod
-    def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
+    def failure_message_from_response(response: dict[str, Any]) -> str | None:
         """
         Get failure message from response dictionary.
 
@@ -142,7 +144,7 @@ class EmrServerlessJobSensor(BaseSensorOperator):
         *,
         application_id: str,
         job_run_id: str,
-        target_states: Union[Set, FrozenSet] = frozenset(SUCCESS_STATES),
+        target_states: set | frozenset = frozenset(SUCCESS_STATES),
         aws_conn_id: str = 'aws_default',
         **kwargs: Any,
     ) -> None:
@@ -152,7 +154,7 @@ class EmrServerlessJobSensor(BaseSensorOperator):
         self.job_run_id = job_run_id
         super().__init__(**kwargs)
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         response = self.hook.conn.get_job_run(applicationId=self.application_id, jobRunId=self.job_run_id)
 
         state = response['jobRun']['state']
@@ -169,7 +171,7 @@ class EmrServerlessJobSensor(BaseSensorOperator):
         return EmrServerlessHook(aws_conn_id=self.aws_conn_id)
 
     @staticmethod
-    def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
+    def failure_message_from_response(response: dict[str, Any]) -> str | None:
         """
         Get failure message from response dictionary.
 
@@ -204,7 +206,7 @@ class EmrServerlessApplicationSensor(BaseSensorOperator):
         self,
         *,
         application_id: str,
-        target_states: Union[Set, FrozenSet] = frozenset(SUCCESS_STATES),
+        target_states: set | frozenset = frozenset(SUCCESS_STATES),
         aws_conn_id: str = 'aws_default',
         **kwargs: Any,
     ) -> None:
@@ -213,7 +215,7 @@ class EmrServerlessApplicationSensor(BaseSensorOperator):
         self.application_id = application_id
         super().__init__(**kwargs)
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         response = self.hook.conn.get_application(applicationId=self.application_id)
 
         state = response['application']['state']
@@ -230,7 +232,7 @@ class EmrServerlessApplicationSensor(BaseSensorOperator):
         return EmrServerlessHook(aws_conn_id=self.aws_conn_id)
 
     @staticmethod
-    def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
+    def failure_message_from_response(response: dict[str, Any]) -> str | None:
         """
         Get failure message from response dictionary.
 
@@ -279,7 +281,7 @@ class EmrContainerSensor(BaseSensorOperator):
         *,
         virtual_cluster_id: str,
         job_id: str,
-        max_retries: Optional[int] = None,
+        max_retries: int | None = None,
         aws_conn_id: str = 'aws_default',
         poll_interval: int = 10,
         **kwargs: Any,
@@ -291,7 +293,7 @@ class EmrContainerSensor(BaseSensorOperator):
         self.poll_interval = poll_interval
         self.max_retries = max_retries
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         state = self.hook.poll_query_status(self.job_id, self.max_retries, self.poll_interval)
 
         if state in self.FAILURE_STATES:
@@ -335,8 +337,8 @@ class EmrJobFlowSensor(EmrBaseSensor):
         self,
         *,
         job_flow_id: str,
-        target_states: Optional[Iterable[str]] = None,
-        failed_states: Optional[Iterable[str]] = None,
+        target_states: Iterable[str] | None = None,
+        failed_states: Iterable[str] | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -344,7 +346,7 @@ class EmrJobFlowSensor(EmrBaseSensor):
         self.target_states = target_states or ['TERMINATED']
         self.failed_states = failed_states or ['TERMINATED_WITH_ERRORS']
 
-    def get_emr_response(self) -> Dict[str, Any]:
+    def get_emr_response(self) -> dict[str, Any]:
         """
         Make an API call with boto3 and get cluster-level details.
 
@@ -360,7 +362,7 @@ class EmrJobFlowSensor(EmrBaseSensor):
         return emr_client.describe_cluster(ClusterId=self.job_flow_id)
 
     @staticmethod
-    def state_from_response(response: Dict[str, Any]) -> str:
+    def state_from_response(response: dict[str, Any]) -> str:
         """
         Get state from response dictionary.
 
@@ -371,7 +373,7 @@ class EmrJobFlowSensor(EmrBaseSensor):
         return response['Cluster']['Status']['State']
 
     @staticmethod
-    def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
+    def failure_message_from_response(response: dict[str, Any]) -> str | None:
         """
         Get failure message from response dictionary.
 
@@ -416,8 +418,8 @@ class EmrStepSensor(EmrBaseSensor):
         *,
         job_flow_id: str,
         step_id: str,
-        target_states: Optional[Iterable[str]] = None,
-        failed_states: Optional[Iterable[str]] = None,
+        target_states: Iterable[str] | None = None,
+        failed_states: Iterable[str] | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -426,7 +428,7 @@ class EmrStepSensor(EmrBaseSensor):
         self.target_states = target_states or ['COMPLETED']
         self.failed_states = failed_states or ['CANCELLED', 'FAILED', 'INTERRUPTED']
 
-    def get_emr_response(self) -> Dict[str, Any]:
+    def get_emr_response(self) -> dict[str, Any]:
         """
         Make an API call with boto3 and get details about the cluster step.
 
@@ -442,7 +444,7 @@ class EmrStepSensor(EmrBaseSensor):
         return emr_client.describe_step(ClusterId=self.job_flow_id, StepId=self.step_id)
 
     @staticmethod
-    def state_from_response(response: Dict[str, Any]) -> str:
+    def state_from_response(response: dict[str, Any]) -> str:
         """
         Get state from response dictionary.
 
@@ -453,7 +455,7 @@ class EmrStepSensor(EmrBaseSensor):
         return response['Step']['Status']['State']
 
     @staticmethod
-    def failure_message_from_response(response: Dict[str, Any]) -> Optional[str]:
+    def failure_message_from_response(response: dict[str, Any]) -> str | None:
         """
         Get failure message from response dictionary.
 
diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/airflow/providers/amazon/aws/sensors/glacier.py
index 8e8b74c58c..f75da878f9 100644
--- a/airflow/providers/amazon/aws/sensors/glacier.py
+++ b/airflow/providers/amazon/aws/sensors/glacier.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Sequence
 
@@ -79,7 +81,7 @@ class GlacierJobOperationSensor(BaseSensorOperator):
         self.poke_interval = poke_interval
         self.mode = mode
 
-    def poke(self, context: 'Context') -> bool:
+    def poke(self, context: Context) -> bool:
         hook = GlacierHook(aws_conn_id=self.aws_conn_id)
         response = hook.describe_job(vault_name=self.vault_name, job_id=self.job_id)
 
diff --git a/airflow/providers/amazon/aws/sensors/glue.py b/airflow/providers/amazon/aws/sensors/glue.py
index 68038307ba..30f9190d2a 100644
--- a/airflow/providers/amazon/aws/sensors/glue.py
+++ b/airflow/providers/amazon/aws/sensors/glue.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, List, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.glue import GlueJobHook
@@ -55,11 +57,11 @@ class GlueJobSensor(BaseSensorOperator):
         self.run_id = run_id
         self.verbose = verbose
         self.aws_conn_id = aws_conn_id
-        self.success_states: List[str] = ['SUCCEEDED']
-        self.errored_states: List[str] = ['FAILED', 'STOPPED', 'TIMEOUT']
-        self.next_log_token: Optional[str] = None
+        self.success_states: list[str] = ['SUCCEEDED']
+        self.errored_states: list[str] = ['FAILED', 'STOPPED', 'TIMEOUT']
+        self.next_log_token: str | None = None
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         hook = GlueJobHook(aws_conn_id=self.aws_conn_id)
         self.log.info('Poking for job run status for Glue Job %s and ID %s', self.job_name, self.run_id)
         job_state = hook.get_job_state(job_name=self.job_name, run_id=self.run_id)
diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py
index 6b9df747e7..2052b6d557 100644
--- a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py
+++ b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
 from airflow.sensors.base import BaseSensorOperator
@@ -58,7 +60,7 @@ class GlueCatalogPartitionSensor(BaseSensorOperator):
         table_name: str,
         expression: str = "ds='{{ ds }}'",
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         database_name: str = 'default',
         poke_interval: int = 60 * 3,
         **kwargs,
@@ -69,9 +71,9 @@ class GlueCatalogPartitionSensor(BaseSensorOperator):
         self.table_name = table_name
         self.expression = expression
         self.database_name = database_name
-        self.hook: Optional[GlueCatalogHook] = None
+        self.hook: GlueCatalogHook | None = None
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         """Checks for existence of the partition in the AWS Glue Catalog table"""
         if '.' in self.table_name:
             self.database_name, self.table_name = self.table_name.split('.')
diff --git a/airflow/providers/amazon/aws/sensors/glue_crawler.py b/airflow/providers/amazon/aws/sensors/glue_crawler.py
index f48b453174..37ce98151b 100644
--- a/airflow/providers/amazon/aws/sensors/glue_crawler.py
+++ b/airflow/providers/amazon/aws/sensors/glue_crawler.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
@@ -46,9 +48,9 @@ class GlueCrawlerSensor(BaseSensorOperator):
         self.aws_conn_id = aws_conn_id
         self.success_statuses = 'SUCCEEDED'
         self.errored_statuses = ('FAILED', 'CANCELLED')
-        self.hook: Optional[GlueCrawlerHook] = None
+        self.hook: GlueCrawlerHook | None = None
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         hook = self.get_hook()
         self.log.info("Poking for AWS Glue crawler: %s", self.crawler_name)
         crawler_state = hook.get_crawler(self.crawler_name)['State']
diff --git a/airflow/providers/amazon/aws/sensors/quicksight.py b/airflow/providers/amazon/aws/sensors/quicksight.py
index 1d4ac75a08..0b6c43442d 100644
--- a/airflow/providers/amazon/aws/sensors/quicksight.py
+++ b/airflow/providers/amazon/aws/sensors/quicksight.py
@@ -15,8 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.compat.functools import cached_property
 from airflow.exceptions import AirflowException
@@ -61,10 +62,10 @@ class QuickSightSensor(BaseSensorOperator):
         self.aws_conn_id = aws_conn_id
         self.success_status = "COMPLETED"
         self.errored_statuses = ("FAILED", "CANCELLED")
-        self.quicksight_hook: Optional[QuickSightHook] = None
-        self.sts_hook: Optional[StsHook] = None
+        self.quicksight_hook: QuickSightHook | None = None
+        self.sts_hook: StsHook | None = None
 
-    def poke(self, context: "Context"):
+    def poke(self, context: Context):
         """
         Pokes until the QuickSight Ingestion has successfully finished.
 
diff --git a/airflow/providers/amazon/aws/sensors/rds.py b/airflow/providers/amazon/aws/sensors/rds.py
index 204117992d..3fdc37543d 100644
--- a/airflow/providers/amazon/aws/sensors/rds.py
+++ b/airflow/providers/amazon/aws/sensors/rds.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, List, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from botocore.exceptions import ClientError
 
@@ -34,10 +35,10 @@ class RdsBaseSensor(BaseSensorOperator):
     ui_color = "#ddbb77"
     ui_fgcolor = "#ffffff"
 
-    def __init__(self, *args, aws_conn_id: str = "aws_conn_id", hook_params: Optional[dict] = None, **kwargs):
+    def __init__(self, *args, aws_conn_id: str = "aws_conn_id", hook_params: dict | None = None, **kwargs):
         hook_params = hook_params or {}
         self.hook = RdsHook(aws_conn_id=aws_conn_id, **hook_params)
-        self.target_statuses: List[str] = []
+        self.target_statuses: list[str] = []
         super().__init__(*args, **kwargs)
 
     def _describe_item(self, item_type: str, item_name: str) -> list:
@@ -98,7 +99,7 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):
         *,
         db_type: str,
         db_snapshot_identifier: str,
-        target_statuses: Optional[List[str]] = None,
+        target_statuses: list[str] | None = None,
         aws_conn_id: str = "aws_conn_id",
         **kwargs,
     ):
@@ -107,7 +108,7 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):
         self.db_snapshot_identifier = db_snapshot_identifier
         self.target_statuses = target_statuses or ['available']
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         self.log.info(
             'Poking for statuses : %s\nfor snapshot %s', self.target_statuses, self.db_snapshot_identifier
         )
@@ -138,7 +139,7 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):
         self,
         *,
         export_task_identifier: str,
-        target_statuses: Optional[List[str]] = None,
+        target_statuses: list[str] | None = None,
         aws_conn_id: str = "aws_default",
         **kwargs,
     ):
@@ -153,7 +154,7 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):
             'canceled',
         ]
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         self.log.info(
             'Poking for statuses : %s\nfor export task %s', self.target_statuses, self.export_task_identifier
         )
@@ -178,7 +179,7 @@ class RdsDbSensor(RdsBaseSensor):
         *,
         db_identifier: str,
         db_type: str = "instance",
-        target_statuses: Optional[List[str]] = None,
+        target_statuses: list[str] | None = None,
         aws_conn_id: str = "aws_default",
         **kwargs,
     ):
@@ -187,7 +188,7 @@ class RdsDbSensor(RdsBaseSensor):
         self.target_statuses = target_statuses or ["available"]
         self.db_type = RdsDbType(db_type)
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         self.log.info(
             "Poking for statuses : %s\nfor db instance %s", self.target_statuses, self.db_identifier
         )
diff --git a/airflow/providers/amazon/aws/sensors/redshift_cluster.py b/airflow/providers/amazon/aws/sensors/redshift_cluster.py
index ae772e95ff..596244af39 100644
--- a/airflow/providers/amazon/aws/sensors/redshift_cluster.py
+++ b/airflow/providers/amazon/aws/sensors/redshift_cluster.py
@@ -14,7 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Optional, Sequence
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
 from airflow.sensors.base import BaseSensorOperator
@@ -49,9 +51,9 @@ class RedshiftClusterSensor(BaseSensorOperator):
         self.cluster_identifier = cluster_identifier
         self.target_status = target_status
         self.aws_conn_id = aws_conn_id
-        self.hook: Optional[RedshiftHook] = None
+        self.hook: RedshiftHook | None = None
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         self.log.info('Poking for status : %s\nfor cluster %s', self.target_status, self.cluster_identifier)
         return self.get_hook().cluster_status(self.cluster_identifier) == self.target_status
 
diff --git a/airflow/providers/amazon/aws/sensors/s3.py b/airflow/providers/amazon/aws/sensors/s3.py
index 6c24c191e4..eb4c70a335 100644
--- a/airflow/providers/amazon/aws/sensors/s3.py
+++ b/airflow/providers/amazon/aws/sensors/s3.py
@@ -15,12 +15,13 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
+from __future__ import annotations
+
 import fnmatch
 import os
 import re
 from datetime import datetime
-from typing import TYPE_CHECKING, Callable, List, Optional, Sequence, Set, Union
+from typing import TYPE_CHECKING, Callable, Sequence
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -75,12 +76,12 @@ class S3KeySensor(BaseSensorOperator):
     def __init__(
         self,
         *,
-        bucket_key: Union[str, List[str]],
-        bucket_name: Optional[str] = None,
+        bucket_key: str | list[str],
+        bucket_name: str | None = None,
         wildcard_match: bool = False,
-        check_fn: Optional[Callable[..., bool]] = None,
+        check_fn: Callable[..., bool] | None = None,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
+        verify: str | bool | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -90,7 +91,7 @@ class S3KeySensor(BaseSensorOperator):
         self.check_fn = check_fn
         self.aws_conn_id = aws_conn_id
         self.verify = verify
-        self.hook: Optional[S3Hook] = None
+        self.hook: S3Hook | None = None
 
     def _check_key(self, key):
         bucket_name, key = S3Hook.get_s3_bucket_key(self.bucket_name, key, 'bucket_name', 'bucket_key')
@@ -123,7 +124,7 @@ class S3KeySensor(BaseSensorOperator):
 
         return True
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         return all(self._check_key(key) for key in self.bucket_key)
 
     def get_hook(self) -> S3Hook:
@@ -181,10 +182,10 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
         bucket_name: str,
         prefix: str,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[bool, str]] = None,
+        verify: bool | str | None = None,
         inactivity_period: float = 60 * 60,
         min_objects: int = 1,
-        previous_objects: Optional[Set[str]] = None,
+        previous_objects: set[str] | None = None,
         allow_delete: bool = True,
         **kwargs,
     ) -> None:
@@ -202,14 +203,14 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
         self.allow_delete = allow_delete
         self.aws_conn_id = aws_conn_id
         self.verify = verify
-        self.last_activity_time: Optional[datetime] = None
+        self.last_activity_time: datetime | None = None
 
     @cached_property
     def hook(self):
         """Returns S3Hook."""
         return S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
 
-    def is_keys_unchanged(self, current_objects: Set[str]) -> bool:
+    def is_keys_unchanged(self, current_objects: set[str]) -> bool:
         """
         Checks whether new objects have been uploaded and the inactivity_period
         has passed and updates the state of the sensor accordingly.
@@ -273,5 +274,5 @@ class S3KeysUnchangedSensor(BaseSensorOperator):
             return False
         return False
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         return self.is_keys_unchanged(set(self.hook.list_keys(self.bucket_name, prefix=self.prefix)))
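
For context on what the retyped S3KeySensor arguments accept, here is a usage sketch based only on the constructor shown above; the bucket, keys and task id are made up, and in a real pipeline the operator would be declared inside a DAG:

    from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor

    def at_least_one_match(objects) -> bool:
        # check_fn receives metadata for the matched keys; this hypothetical
        # check only requires that something matched at all.
        return len(objects) > 0

    # bucket_key takes a single key or a list of keys, and check_fn is
    # optional; both facts follow from the annotations in the hunk above.
    wait_for_exports = S3KeySensor(
        task_id="wait_for_exports",
        bucket_name="example-bucket",
        bucket_key=["exports/part-0.csv", "exports/part-1.csv"],
        check_fn=at_least_one_match,
    )
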
diff --git a/airflow/providers/amazon/aws/sensors/sagemaker.py b/airflow/providers/amazon/aws/sensors/sagemaker.py
index 925ddaed17..585003811c 100644
--- a/airflow/providers/amazon/aws/sensors/sagemaker.py
+++ b/airflow/providers/amazon/aws/sensors/sagemaker.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import time
-from typing import TYPE_CHECKING, Optional, Sequence, Set
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook
@@ -39,7 +40,7 @@ class SageMakerBaseSensor(BaseSensorOperator):
     def __init__(self, *, aws_conn_id: str = 'aws_default', **kwargs):
         super().__init__(**kwargs)
         self.aws_conn_id = aws_conn_id
-        self.hook: Optional[SageMakerHook] = None
+        self.hook: SageMakerHook | None = None
 
     def get_hook(self) -> SageMakerHook:
         """Get SageMakerHook."""
@@ -48,7 +49,7 @@ class SageMakerBaseSensor(BaseSensorOperator):
         self.hook = SageMakerHook(aws_conn_id=self.aws_conn_id)
         return self.hook
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         response = self.get_sagemaker_response()
         if response['ResponseMetadata']['HTTPStatusCode'] != 200:
             self.log.info('Bad HTTP response: %s', response)
@@ -62,11 +63,11 @@ class SageMakerBaseSensor(BaseSensorOperator):
             raise AirflowException(f'Sagemaker job failed for the following reason: {failed_reason}')
         return True
 
-    def non_terminal_states(self) -> Set[str]:
+    def non_terminal_states(self) -> set[str]:
         """Placeholder for returning states with should not terminate."""
         raise NotImplementedError('Please implement non_terminal_states() in subclass')
 
-    def failed_states(self) -> Set[str]:
+    def failed_states(self) -> set[str]:
         """Placeholder for returning states with are considered failed."""
         raise NotImplementedError('Please implement failed_states() in subclass')
 
@@ -213,8 +214,8 @@ class SageMakerTrainingSensor(SageMakerBaseSensor):
         self.print_log = print_log
         self.positions = {}
         self.stream_names = []
-        self.instance_count: Optional[int] = None
-        self.state: Optional[int] = None
+        self.instance_count: int | None = None
+        self.state: int | None = None
         self.last_description = None
         self.last_describe_job_call = None
         self.log_resource_inited = False
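
The EMR and SageMaker base sensors above share a template-method shape: poke() fetches a response once, then defers state extraction and failure detection to hooks that subclasses override. A stripped-down sketch of that control flow (the class names are invented and stand in for the real Airflow classes):

    from __future__ import annotations

    from typing import Any

    class StateSensorSketch:
        # poke() owns the control flow; subclasses supply the service calls.
        target_states: set[str] = {"COMPLETED"}
        failed_states: set[str] = {"FAILED"}

        def get_response(self) -> dict[str, Any]:
            raise NotImplementedError

        @staticmethod
        def state_from_response(response: dict[str, Any]) -> str:
            raise NotImplementedError

        def poke(self) -> bool:
            state = self.state_from_response(self.get_response())
            if state in self.failed_states:
                raise RuntimeError(f"job reached failure state {state}")
            return state in self.target_states

    class FakeJobSensor(StateSensorSketch):
        # Hypothetical subclass; a real sensor would call boto3 here.
        def get_response(self) -> dict[str, Any]:
            return {"Status": {"State": "COMPLETED"}}

        @staticmethod
        def state_from_response(response: dict[str, Any]) -> str:
            return response["Status"]["State"]
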
diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/airflow/providers/amazon/aws/sensors/sqs.py
index 190c5edc15..f813c05ab0 100644
--- a/airflow/providers/amazon/aws/sensors/sqs.py
+++ b/airflow/providers/amazon/aws/sensors/sqs.py
@@ -16,8 +16,10 @@
 # specific language governing permissions and limitations
 # under the License.
 """Reads and then deletes the message from SQS queue"""
+from __future__ import annotations
+
 import json
-from typing import TYPE_CHECKING, Any, Collection, List, Optional, Sequence
+from typing import TYPE_CHECKING, Any, Collection, Sequence
 
 from jsonpath_ng import parse
 from typing_extensions import Literal
@@ -79,8 +81,8 @@ class SqsSensor(BaseSensorOperator):
         max_messages: int = 5,
         num_batches: int = 1,
         wait_time_seconds: int = 1,
-        visibility_timeout: Optional[int] = None,
-        message_filtering: Optional[Literal["literal", "jsonpath"]] = None,
+        visibility_timeout: int | None = None,
+        message_filtering: Literal["literal", "jsonpath"] | None = None,
         message_filtering_match_values: Any = None,
         message_filtering_config: Any = None,
         delete_message_on_reception: bool = True,
@@ -109,7 +111,7 @@ class SqsSensor(BaseSensorOperator):
 
         self.message_filtering_config = message_filtering_config
 
-        self.hook: Optional[SqsHook] = None
+        self.hook: SqsHook | None = None
 
     def poll_sqs(self, sqs_conn: BaseAwsConnection) -> Collection:
         """
@@ -143,7 +145,7 @@ class SqsSensor(BaseSensorOperator):
             self.log.info("There are %d messages left after filtering", num_messages)
         return messages
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         """
         Check subscribed queue for messages and write them to xcom with the ``messages`` key.
 
@@ -152,7 +154,7 @@ class SqsSensor(BaseSensorOperator):
         """
         sqs_conn = self.get_hook().get_conn()
 
-        message_batch: List[Any] = []
+        message_batch: list[Any] = []
 
         # perform multiple SQS call to retrieve messages in series
         for _ in range(self.num_batches):
diff --git a/airflow/providers/amazon/aws/sensors/step_function.py b/airflow/providers/amazon/aws/sensors/step_function.py
index 6f82c0bc99..0cf58aeca5 100644
--- a/airflow/providers/amazon/aws/sensors/step_function.py
+++ b/airflow/providers/amazon/aws/sensors/step_function.py
@@ -14,9 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
@@ -60,16 +61,16 @@ class StepFunctionExecutionSensor(BaseSensorOperator):
         *,
         execution_arn: str,
         aws_conn_id: str = 'aws_default',
-        region_name: Optional[str] = None,
+        region_name: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.execution_arn = execution_arn
         self.aws_conn_id = aws_conn_id
         self.region_name = region_name
-        self.hook: Optional[StepFunctionHook] = None
+        self.hook: StepFunctionHook | None = None
 
-    def poke(self, context: 'Context'):
+    def poke(self, context: Context):
         execution_status = self.get_hook().describe_execution(self.execution_arn)
         state = execution_status['status']
         output = json.loads(execution_status['output']) if 'output' in execution_status else None
diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
index 218f4dc16c..4b148856a3 100644
--- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -15,17 +15,17 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
-
 """
 This module contains operators to replicate records from
 DynamoDB table to S3.
 """
+from __future__ import annotations
+
 import json
 from copy import copy
 from os.path import getsize
 from tempfile import NamedTemporaryFile
-from typing import IO, TYPE_CHECKING, Any, Callable, Dict, Optional, Sequence
+from typing import IO, TYPE_CHECKING, Any, Callable, Sequence
 from uuid import uuid4
 
 from airflow.models import BaseOperator
@@ -36,7 +36,7 @@ if TYPE_CHECKING:
     from airflow.utils.context import Context
 
 
-def _convert_item_to_json_bytes(item: Dict[str, Any]) -> bytes:
+def _convert_item_to_json_bytes(item: dict[str, Any]) -> bytes:
     return (json.dumps(item) + '\n').encode('utf-8')
 
 
@@ -93,9 +93,9 @@ class DynamoDBToS3Operator(BaseOperator):
         dynamodb_table_name: str,
         s3_bucket_name: str,
         file_size: int,
-        dynamodb_scan_kwargs: Optional[Dict[str, Any]] = None,
+        dynamodb_scan_kwargs: dict[str, Any] | None = None,
         s3_key_prefix: str = '',
-        process_func: Callable[[Dict[str, Any]], bytes] = _convert_item_to_json_bytes,
+        process_func: Callable[[dict[str, Any]], bytes] = _convert_item_to_json_bytes,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ) -> None:
@@ -108,7 +108,7 @@ class DynamoDBToS3Operator(BaseOperator):
         self.s3_key_prefix = s3_key_prefix
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         hook = DynamoDBHook(aws_conn_id=self.aws_conn_id)
         table = hook.get_conn().Table(self.dynamodb_table_name)
 
diff --git a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
index 0f2fb7a99e..dc14dee89a 100644
--- a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
@@ -16,9 +16,10 @@
 # specific language governing permissions and limitations
 # under the License.
 """Transfers data from Exasol database into a S3 Bucket."""
+from __future__ import annotations
 
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Dict, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -59,13 +60,13 @@ class ExasolToS3Operator(BaseOperator):
         *,
         query_or_table: str,
         key: str,
-        bucket_name: Optional[str] = None,
+        bucket_name: str | None = None,
         replace: bool = False,
         encrypt: bool = False,
         gzip: bool = False,
-        acl_policy: Optional[str] = None,
-        query_params: Optional[Dict] = None,
-        export_params: Optional[Dict] = None,
+        acl_policy: str | None = None,
+        query_params: dict | None = None,
+        export_params: dict | None = None,
         exasol_conn_id: str = 'exasol_default',
         aws_conn_id: str = 'aws_default',
         **kwargs,
@@ -83,7 +84,7 @@ class ExasolToS3Operator(BaseOperator):
         self.exasol_conn_id = exasol_conn_id
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         exasol_hook = ExasolHook(exasol_conn_id=self.exasol_conn_id)
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
 
diff --git a/airflow/providers/amazon/aws/transfers/ftp_to_s3.py b/airflow/providers/amazon/aws/transfers/ftp_to_s3.py
index 1426599bc4..e6019316b4 100644
--- a/airflow/providers/amazon/aws/transfers/ftp_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/ftp_to_s3.py
@@ -15,8 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -68,14 +70,14 @@ class FTPToS3Operator(BaseOperator):
         ftp_path: str,
         s3_bucket: str,
         s3_key: str,
-        ftp_filenames: Optional[Union[str, List[str]]] = None,
-        s3_filenames: Optional[Union[str, List[str]]] = None,
+        ftp_filenames: str | list[str] | None = None,
+        s3_filenames: str | list[str] | None = None,
         ftp_conn_id: str = 'ftp_default',
         aws_conn_id: str = 'aws_default',
         replace: bool = False,
         encrypt: bool = False,
         gzip: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -90,8 +92,8 @@ class FTPToS3Operator(BaseOperator):
         self.encrypt = encrypt
         self.gzip = gzip
         self.acl_policy = acl_policy
-        self.s3_hook: Optional[S3Hook] = None
-        self.ftp_hook: Optional[FTPHook] = None
+        self.s3_hook: S3Hook | None = None
+        self.ftp_hook: FTPHook | None = None
 
     def __upload_to_s3_from_ftp(self, remote_filename, s3_file_key):
         with NamedTemporaryFile() as local_tmp_file:
@@ -110,7 +112,7 @@ class FTPToS3Operator(BaseOperator):
             )
             self.log.info('File uploaded to %s', s3_file_key)
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         self.ftp_hook = FTPHook(ftp_conn_id=self.ftp_conn_id)
         self.s3_hook = S3Hook(self.aws_conn_id)
 
diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
index b521ce5360..c417128064 100644
--- a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
@@ -16,9 +16,11 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains Google Cloud Storage to S3 operator."""
+from __future__ import annotations
+
 import os
 import warnings
-from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -95,18 +97,18 @@ class GCSToS3Operator(BaseOperator):
         self,
         *,
         bucket: str,
-        prefix: Optional[str] = None,
-        delimiter: Optional[str] = None,
+        prefix: str | None = None,
+        delimiter: str | None = None,
         gcp_conn_id: str = 'google_cloud_default',
-        google_cloud_storage_conn_id: Optional[str] = None,
-        delegate_to: Optional[str] = None,
+        google_cloud_storage_conn_id: str | None = None,
+        delegate_to: str | None = None,
         dest_aws_conn_id: str = 'aws_default',
         dest_s3_key: str,
-        dest_verify: Optional[Union[str, bool]] = None,
+        dest_verify: str | bool | None = None,
         replace: bool = False,
-        google_impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
-        dest_s3_extra_args: Optional[Dict] = None,
-        s3_acl_policy: Optional[str] = None,
+        google_impersonation_chain: str | Sequence[str] | None = None,
+        dest_s3_extra_args: dict | None = None,
+        s3_acl_policy: str | None = None,
         keep_directory_structure: bool = True,
         **kwargs,
     ) -> None:
@@ -135,7 +137,7 @@ class GCSToS3Operator(BaseOperator):
         self.s3_acl_policy = s3_acl_policy
         self.keep_directory_structure = keep_directory_structure
 
-    def execute(self, context: 'Context') -> List[str]:
+    def execute(self, context: Context) -> list[str]:
         # list all files in an Google Cloud Storage bucket
         hook = GCSHook(
             gcp_conn_id=self.gcp_conn_id,
diff --git a/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py b/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py
index 07d3410ee7..4a189230a4 100644
--- a/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py
+++ b/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py
@@ -15,8 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import tempfile
-from typing import TYPE_CHECKING, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
@@ -71,8 +73,8 @@ class GlacierToGCSOperator(BaseOperator):
         object_name: str,
         gzip: bool,
         chunk_size: int = 1024,
-        delegate_to: Optional[str] = None,
-        google_impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        delegate_to: str | None = None,
+        google_impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -86,7 +88,7 @@ class GlacierToGCSOperator(BaseOperator):
         self.delegate_to = delegate_to
         self.impersonation_chain = google_impersonation_chain
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         glacier_hook = GlacierHook(aws_conn_id=self.aws_conn_id)
         gcs_hook = GCSHook(
             gcp_conn_id=self.gcp_conn_id,
diff --git a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
index f3e10b62b2..bd140e6371 100644
--- a/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
@@ -15,11 +15,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-#
 """This module allows you to transfer data from any Google API endpoint into a S3 Bucket."""
+from __future__ import annotations
+
 import json
 import sys
-from typing import TYPE_CHECKING, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator, TaskInstance
 from airflow.models.xcom import MAX_XCOM_SIZE, XCOM_RETURN_KEY
@@ -100,16 +101,16 @@ class GoogleApiToS3Operator(BaseOperator):
         google_api_endpoint_path: str,
         google_api_endpoint_params: dict,
         s3_destination_key: str,
-        google_api_response_via_xcom: Optional[str] = None,
-        google_api_endpoint_params_via_xcom: Optional[str] = None,
-        google_api_endpoint_params_via_xcom_task_ids: Optional[str] = None,
+        google_api_response_via_xcom: str | None = None,
+        google_api_endpoint_params_via_xcom: str | None = None,
+        google_api_endpoint_params_via_xcom_task_ids: str | None = None,
         google_api_pagination: bool = False,
         google_api_num_retries: int = 0,
         s3_overwrite: bool = False,
         gcp_conn_id: str = 'google_cloud_default',
-        delegate_to: Optional[str] = None,
+        delegate_to: str | None = None,
         aws_conn_id: str = 'aws_default',
-        google_impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        google_impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -129,7 +130,7 @@ class GoogleApiToS3Operator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.google_impersonation_chain = google_impersonation_chain
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         """
         Transfers Google APIs json data to S3.
 
diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
index 652eca22b8..f031291093 100644
--- a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
+++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
@@ -15,11 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains operator to move data from Hive to DynamoDB."""
+from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Callable, Optional, Sequence
+from typing import TYPE_CHECKING, Callable, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
@@ -63,10 +63,10 @@ class HiveToDynamoDBOperator(BaseOperator):
         sql: str,
         table_name: str,
         table_keys: list,
-        pre_process: Optional[Callable] = None,
-        pre_process_args: Optional[list] = None,
-        pre_process_kwargs: Optional[list] = None,
-        region_name: Optional[str] = None,
+        pre_process: Callable | None = None,
+        pre_process_args: list | None = None,
+        pre_process_kwargs: list | None = None,
+        region_name: str | None = None,
         schema: str = 'default',
         hiveserver2_conn_id: str = 'hiveserver2_default',
         aws_conn_id: str = 'aws_default',
@@ -84,7 +84,7 @@ class HiveToDynamoDBOperator(BaseOperator):
         self.hiveserver2_conn_id = hiveserver2_conn_id
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id)
 
         self.log.info('Extracting data from Hive')
diff --git a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
index e79276dbc7..b6f3e1ef3a 100644
--- a/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
@@ -16,8 +16,10 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module allows you to transfer mail attachments from a mail server into s3 bucket."""
+from __future__ import annotations
+
 import warnings
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -64,7 +66,7 @@ class ImapAttachmentToS3Operator(BaseOperator):
         imap_mail_filter: str = 'All',
         s3_overwrite: bool = False,
         imap_conn_id: str = 'imap_default',
-        s3_conn_id: Optional[str] = None,
+        s3_conn_id: str | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ) -> None:
@@ -83,7 +85,7 @@ class ImapAttachmentToS3Operator(BaseOperator):
         self.imap_conn_id = imap_conn_id
         self.aws_conn_id = aws_conn_id
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         """
         This function executes the transfer from the email server (via imap) into s3.
 
diff --git a/airflow/providers/amazon/aws/transfers/local_to_s3.py b/airflow/providers/amazon/aws/transfers/local_to_s3.py
index d3e76dcb1d..39855bceb0 100644
--- a/airflow/providers/amazon/aws/transfers/local_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/local_to_s3.py
@@ -15,7 +15,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-from typing import TYPE_CHECKING, Optional, Sequence, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -69,13 +71,13 @@ class LocalFilesystemToS3Operator(BaseOperator):
         *,
         filename: str,
         dest_key: str,
-        dest_bucket: Optional[str] = None,
+        dest_bucket: str | None = None,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[str, bool]] = None,
+        verify: str | bool | None = None,
         replace: bool = False,
         encrypt: bool = False,
         gzip: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
         **kwargs,
     ):
         super().__init__(**kwargs)
@@ -90,7 +92,7 @@ class LocalFilesystemToS3Operator(BaseOperator):
         self.gzip = gzip
         self.acl_policy = acl_policy
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         s3_bucket, s3_key = s3_hook.get_s3_bucket_key(
             self.dest_bucket, self.dest_key, 'dest_bucket', 'dest_key'
diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
index 44aae36378..7508d32548 100644
--- a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
@@ -15,9 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import json
 import warnings
-from typing import TYPE_CHECKING, Any, Iterable, Optional, Sequence, Union, cast
+from typing import TYPE_CHECKING, Any, Iterable, Sequence, cast
 
 from bson import json_util
 
@@ -64,18 +66,18 @@ class MongoToS3Operator(BaseOperator):
     def __init__(
         self,
         *,
-        s3_conn_id: Optional[str] = None,
+        s3_conn_id: str | None = None,
         mongo_conn_id: str = 'mongo_default',
         aws_conn_id: str = 'aws_default',
         mongo_collection: str,
-        mongo_query: Union[list, dict],
+        mongo_query: list | dict,
         s3_bucket: str,
         s3_key: str,
-        mongo_db: Optional[str] = None,
-        mongo_projection: Optional[Union[list, dict]] = None,
+        mongo_db: str | None = None,
+        mongo_projection: list | dict | None = None,
         replace: bool = False,
         allow_disk_use: bool = False,
-        compression: Optional[str] = None,
+        compression: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -99,7 +101,7 @@ class MongoToS3Operator(BaseOperator):
         self.allow_disk_use = allow_disk_use
         self.compression = compression
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """Is written to depend on transform method"""
         s3_conn = S3Hook(self.aws_conn_id)
 
diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
index b09044dd71..002eb64b7c 100644
--- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -16,7 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 """Transfers data from AWS Redshift into a S3 Bucket."""
-from typing import TYPE_CHECKING, Iterable, List, Mapping, Optional, Sequence, Union
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Iterable, List, Mapping, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
@@ -84,16 +86,16 @@ class RedshiftToS3Operator(BaseOperator):
         *,
         s3_bucket: str,
         s3_key: str,
-        schema: Optional[str] = None,
-        table: Optional[str] = None,
-        select_query: Optional[str] = None,
+        schema: str | None = None,
+        table: str | None = None,
+        select_query: str | None = None,
         redshift_conn_id: str = 'redshift_default',
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[bool, str]] = None,
-        unload_options: Optional[List] = None,
+        verify: bool | str | None = None,
+        unload_options: list | None = None,
         autocommit: bool = False,
         include_header: bool = False,
-        parameters: Optional[Union[Iterable, Mapping]] = None,
+        parameters: Iterable | Mapping | None = None,
         table_as_file_name: bool = True,  # Set to True by default for not breaking current workflows
         **kwargs,
     ) -> None:
@@ -136,7 +138,7 @@ class RedshiftToS3Operator(BaseOperator):
                     {unload_options};
         """
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         redshift_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
         conn = S3Hook.get_connection(conn_id=self.aws_conn_id)
         if conn.extra_dejson.get('role_arn', False):
diff --git a/airflow/providers/amazon/aws/transfers/s3_to_ftp.py b/airflow/providers/amazon/aws/transfers/s3_to_ftp.py
index 2e07a9575f..b0f9b3b9aa 100644
--- a/airflow/providers/amazon/aws/transfers/s3_to_ftp.py
+++ b/airflow/providers/amazon/aws/transfers/s3_to_ftp.py
@@ -15,6 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Sequence
@@ -65,7 +66,7 @@ class S3ToFTPOperator(BaseOperator):
         self.aws_conn_id = aws_conn_id
         self.ftp_conn_id = ftp_conn_id
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         s3_hook = S3Hook(self.aws_conn_id)
         ftp_hook = FTPHook(ftp_conn_id=self.ftp_conn_id)
 
diff --git a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
index e0de981d7e..8e46b25510 100644
--- a/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
+++ b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
@@ -14,8 +14,9 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Iterable, Sequence
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -76,12 +77,12 @@ class S3ToRedshiftOperator(BaseOperator):
         s3_key: str,
         redshift_conn_id: str = 'redshift_default',
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[bool, str]] = None,
-        column_list: Optional[List[str]] = None,
-        copy_options: Optional[List] = None,
+        verify: bool | str | None = None,
+        column_list: list[str] | None = None,
+        copy_options: list | None = None,
         autocommit: bool = False,
         method: str = 'APPEND',
-        upsert_keys: Optional[List[str]] = None,
+        upsert_keys: list[str] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -111,7 +112,7 @@ class S3ToRedshiftOperator(BaseOperator):
                     {copy_options};
         """
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         redshift_hook = RedshiftSQLHook(redshift_conn_id=self.redshift_conn_id)
         conn = S3Hook.get_connection(conn_id=self.aws_conn_id)
 
@@ -128,7 +129,7 @@ class S3ToRedshiftOperator(BaseOperator):
 
         copy_statement = self._build_copy_query(copy_destination, credentials_block, copy_options)
 
-        sql: Union[str, Iterable[str]]
+        sql: str | Iterable[str]
 
         if self.method == 'REPLACE':
             sql = ["BEGIN;", f"DELETE FROM {destination};", copy_statement, "COMMIT"]
diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sftp.py b/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
index 7c003cfb72..8e1fb81591 100644
--- a/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
+++ b/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
@@ -15,9 +15,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 import warnings
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 from urllib.parse import urlparse
 
 from airflow.models import BaseOperator
@@ -60,7 +62,7 @@ class S3ToSFTPOperator(BaseOperator):
         s3_key: str,
         sftp_path: str,
         sftp_conn_id: str = 'ssh_default',
-        s3_conn_id: Optional[str] = None,
+        s3_conn_id: str | None = None,
         aws_conn_id: str = 'aws_default',
         **kwargs,
     ) -> None:
@@ -81,7 +83,7 @@ class S3ToSFTPOperator(BaseOperator):
         parsed_s3_key = urlparse(s3_key)
         return parsed_s3_key.path.lstrip('/')
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         self.s3_key = self.get_s3_key(self.s3_key)
         ssh_hook = SSHHook(ssh_conn_id=self.sftp_conn_id)
         s3_hook = S3Hook(self.aws_conn_id)
diff --git a/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py b/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py
index a953693f1f..f0d9c82f3f 100644
--- a/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py
@@ -14,10 +14,11 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import os
 import tempfile
-from typing import TYPE_CHECKING, Dict, Optional, Sequence
+from typing import TYPE_CHECKING, Sequence
 
 from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -70,7 +71,7 @@ class SalesforceToS3Operator(BaseOperator):
         s3_key: str,
         salesforce_conn_id: str,
         export_format: str = "csv",
-        query_params: Optional[Dict] = None,
+        query_params: dict | None = None,
         include_deleted: bool = False,
         coerce_to_timestamp: bool = False,
         record_time_added: bool = False,
@@ -78,7 +79,7 @@ class SalesforceToS3Operator(BaseOperator):
         replace: bool = False,
         encrypt: bool = False,
         gzip: bool = False,
-        acl_policy: Optional[str] = None,
+        acl_policy: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -97,7 +98,7 @@ class SalesforceToS3Operator(BaseOperator):
         self.gzip = gzip
         self.acl_policy = acl_policy
 
-    def execute(self, context: 'Context') -> str:
+    def execute(self, context: Context) -> str:
         salesforce_hook = SalesforceHook(salesforce_conn_id=self.salesforce_conn_id)
         response = salesforce_hook.make_query(
             query=self.salesforce_query,
diff --git a/airflow/providers/amazon/aws/transfers/sftp_to_s3.py b/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
index 71376e3179..23713ad1f4 100644
--- a/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
+
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Sequence
 from urllib.parse import urlparse
@@ -77,7 +79,7 @@ class SFTPToS3Operator(BaseOperator):
         parsed_s3_key = urlparse(s3_key)
         return parsed_s3_key.path.lstrip('/')
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         self.s3_key = self.get_s3_key(self.s3_key)
         ssh_hook = SSHHook(ssh_conn_id=self.sftp_conn_id)
         s3_hook = S3Hook(self.s3_conn_id)
diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/airflow/providers/amazon/aws/transfers/sql_to_s3.py
index 5fb02a0f2f..dfaf6b7973 100644
--- a/airflow/providers/amazon/aws/transfers/sql_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/sql_to_s3.py
@@ -15,11 +15,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import enum
 from collections import namedtuple
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Iterable, Mapping, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
 
 import numpy as np
 import pandas as pd
@@ -99,12 +100,12 @@ class SqlToS3Operator(BaseOperator):
         s3_bucket: str,
         s3_key: str,
         sql_conn_id: str,
-        parameters: Union[None, Mapping, Iterable] = None,
+        parameters: None | Mapping | Iterable = None,
         replace: bool = False,
         aws_conn_id: str = 'aws_default',
-        verify: Optional[Union[bool, str]] = None,
+        verify: bool | str | None = None,
         file_format: Literal['csv', 'json', 'parquet'] = 'csv',
-        pd_kwargs: Optional[dict] = None,
+        pd_kwargs: dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -151,7 +152,7 @@ class SqlToS3Operator(BaseOperator):
                     df[col] = np.where(df[col].isnull(), None, df[col])
                     df[col] = df[col].astype(pd.Float64Dtype())
 
-    def execute(self, context: 'Context') -> None:
+    def execute(self, context: Context) -> None:
         sql_hook = self._get_hook()
         s3_conn = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
         data_df = sql_hook.get_pandas_df(sql=self.query, parameters=self.parameters)
diff --git a/airflow/providers/amazon/aws/utils/__init__.py b/airflow/providers/amazon/aws/utils/__init__.py
index 251276e738..a2223dd01f 100644
--- a/airflow/providers/amazon/aws/utils/__init__.py
+++ b/airflow/providers/amazon/aws/utils/__init__.py
@@ -14,10 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import re
 from datetime import datetime
-from typing import Tuple
 
 from airflow.version import version
 
@@ -41,6 +41,6 @@ def datetime_to_epoch_us(date_time: datetime) -> int:
     return int(date_time.timestamp() * 1_000_000)
 
 
-def get_airflow_version() -> Tuple[int, ...]:
+def get_airflow_version() -> tuple[int, ...]:
     val = re.sub(r'(\d+\.\d+\.\d+).*', lambda x: x.group(1), version)
     return tuple(int(x) for x in val.split('.'))
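
Across these hunks the mechanical rewrite is always the same: once `from __future__ import annotations` is present, `Optional[X]` becomes `X | None`, `Union[X, Y]` becomes `X | Y`, and the `typing` aliases `List`, `Dict` and `Tuple` give way to the builtin `list`, `dict` and `tuple` generics, even on the Python 3.7+ interpreters the providers support, because annotations are stored as strings and never evaluated at runtime. A hypothetical before/after sketch (the function and its parameters are illustrative only, not part of the commit):

from __future__ import annotations

from typing import Sequence


def upload_keys(
    filename: str,
    dest_bucket: str | None = None,                          # was: Optional[str]
    verify: str | bool | None = None,                        # was: Optional[Union[str, bool]]
    impersonation_chain: str | Sequence[str] | None = None,  # was: Optional[Union[str, Sequence[str]]]
) -> list[str]:                                              # was: List[str]
    """Return the destination keys that would be written (illustrative only)."""
    prefix = f"{dest_bucket}/" if dest_bucket else ""
    return [f"{prefix}{filename}"]

Because the annotations stay inert, `upload_keys.__annotations__['dest_bucket']` is simply the string 'str | None' on any supported interpreter.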
diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/airflow/providers/amazon/aws/utils/connection_wrapper.py
index d23ba7771b..15db28a24b 100644
--- a/airflow/providers/amazon/aws/utils/connection_wrapper.py
+++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py
@@ -14,11 +14,12 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import warnings
 from copy import deepcopy
 from dataclasses import MISSING, InitVar, dataclass, field, fields
-from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any
 
 from botocore.config import Config
 
@@ -60,40 +61,40 @@ class AwsConnectionWrapper(LoggingMixin):
         3. The wrapper's default value
     """
 
-    conn: InitVar[Optional[Union["Connection", "AwsConnectionWrapper"]]]
-    region_name: Optional[str] = field(default=None)
+    conn: InitVar[Connection | AwsConnectionWrapper | None]
+    region_name: str | None = field(default=None)
     # boto3 client/resource configs
-    botocore_config: Optional[Config] = field(default=None)
-    verify: Optional[Union[bool, str]] = field(default=None)
+    botocore_config: Config | None = field(default=None)
+    verify: bool | str | None = field(default=None)
 
     # Reference to Airflow Connection attributes
     # ``extra_config`` contains original Airflow Connection Extra.
-    conn_id: Optional[Union[str, ArgNotSet]] = field(init=False, default=NOTSET)
-    conn_type: Optional[str] = field(init=False, default=None)
-    login: Optional[str] = field(init=False, repr=False, default=None)
-    password: Optional[str] = field(init=False, repr=False, default=None)
-    extra_config: Dict[str, Any] = field(init=False, repr=False, default_factory=dict)
+    conn_id: str | ArgNotSet | None = field(init=False, default=NOTSET)
+    conn_type: str | None = field(init=False, default=None)
+    login: str | None = field(init=False, repr=False, default=None)
+    password: str | None = field(init=False, repr=False, default=None)
+    extra_config: dict[str, Any] = field(init=False, repr=False, default_factory=dict)
 
     # AWS Credentials from connection.
-    aws_access_key_id: Optional[str] = field(init=False, default=None)
-    aws_secret_access_key: Optional[str] = field(init=False, default=None)
-    aws_session_token: Optional[str] = field(init=False, default=None)
+    aws_access_key_id: str | None = field(init=False, default=None)
+    aws_secret_access_key: str | None = field(init=False, default=None)
+    aws_session_token: str | None = field(init=False, default=None)
 
     # AWS Shared Credential profile_name
-    profile_name: Optional[str] = field(init=False, default=None)
+    profile_name: str | None = field(init=False, default=None)
     # Custom endpoint_url for boto3.client and boto3.resource
-    endpoint_url: Optional[str] = field(init=False, default=None)
+    endpoint_url: str | None = field(init=False, default=None)
 
     # Assume Role Configurations
-    role_arn: Optional[str] = field(init=False, default=None)
-    assume_role_method: Optional[str] = field(init=False, default=None)
-    assume_role_kwargs: Dict[str, Any] = field(init=False, default_factory=dict)
+    role_arn: str | None = field(init=False, default=None)
+    assume_role_method: str | None = field(init=False, default=None)
+    assume_role_kwargs: dict[str, Any] = field(init=False, default_factory=dict)
 
     @cached_property
     def conn_repr(self):
         return f"AWS Connection (conn_id={self.conn_id!r}, conn_type={self.conn_type!r})"
 
-    def __post_init__(self, conn: "Connection"):
+    def __post_init__(self, conn: Connection):
         if isinstance(conn, type(self)):
             # For every field with init=False we copy reference value from original wrapper
             # For every field with init=True we use init values if it not equal default
@@ -218,10 +219,10 @@ class AwsConnectionWrapper(LoggingMixin):
     @classmethod
     def from_connection_metadata(
         cls,
-        conn_id: Optional[str] = None,
-        login: Optional[str] = None,
-        password: Optional[str] = None,
-        extra: Optional[Dict[str, Any]] = None,
+        conn_id: str | None = None,
+        login: str | None = None,
+        password: str | None = None,
+        extra: dict[str, Any] | None = None,
     ):
         """
         Create config from connection metadata.
@@ -243,7 +244,7 @@ class AwsConnectionWrapper(LoggingMixin):
         return self.extra_config
 
     @property
-    def session_kwargs(self) -> Dict[str, Any]:
+    def session_kwargs(self) -> dict[str, Any]:
         """Additional kwargs passed to boto3.session.Session."""
         return trim_none_values(
             {
@@ -261,16 +262,16 @@ class AwsConnectionWrapper(LoggingMixin):
     def _get_credentials(
         self,
         *,
-        aws_access_key_id: Optional[str] = None,
-        aws_secret_access_key: Optional[str] = None,
-        aws_session_token: Optional[str] = None,
+        aws_access_key_id: str | None = None,
+        aws_secret_access_key: str | None = None,
+        aws_session_token: str | None = None,
         # Deprecated Values
-        s3_config_file: Optional[str] = None,
-        s3_config_format: Optional[str] = None,
-        profile: Optional[str] = None,
-        session_kwargs: Optional[Dict[str, Any]] = None,
+        s3_config_file: str | None = None,
+        s3_config_format: str | None = None,
+        profile: str | None = None,
+        session_kwargs: dict[str, Any] | None = None,
         **kwargs,
-    ) -> Tuple[Optional[str], Optional[str], Optional[str]]:
+    ) -> tuple[str | None, str | None, str | None]:
         """
         Get AWS credentials from connection login/password and extra.
 
@@ -323,15 +324,15 @@ class AwsConnectionWrapper(LoggingMixin):
     def _get_assume_role_configs(
         self,
         *,
-        role_arn: Optional[str] = None,
+        role_arn: str | None = None,
         assume_role_method: str = "assume_role",
-        assume_role_kwargs: Optional[Dict[str, Any]] = None,
+        assume_role_kwargs: dict[str, Any] | None = None,
         # Deprecated Values
-        aws_account_id: Optional[str] = None,
-        aws_iam_role: Optional[str] = None,
-        external_id: Optional[str] = None,
+        aws_account_id: str | None = None,
+        aws_iam_role: str | None = None,
+        external_id: str | None = None,
         **kwargs,
-    ) -> Tuple[Optional[str], Optional[str], Dict[Any, str]]:
+    ) -> tuple[str | None, str | None, dict[Any, str]]:
         """Get assume role configs from Connection extra."""
         if role_arn:
             self.log.debug("Retrieving role_arn=%r from %s extra.", role_arn, self.conn_repr)
@@ -377,8 +378,8 @@ class AwsConnectionWrapper(LoggingMixin):
 
 
 def _parse_s3_config(
-    config_file_name: str, config_format: Optional[str] = "boto", profile: Optional[str] = None
-) -> Tuple[Optional[str], Optional[str]]:
+    config_file_name: str, config_format: str | None = "boto", profile: str | None = None
+) -> tuple[str | None, str | None]:
     """
     Parses a config file for s3 credentials. Can currently
     parse boto, s3cmd.conf and AWS SDK config formats
diff --git a/airflow/providers/amazon/aws/utils/eks_get_token.py b/airflow/providers/amazon/aws/utils/eks_get_token.py
index d9422b35e3..4195ad9a7e 100644
--- a/airflow/providers/amazon/aws/utils/eks_get_token.py
+++ b/airflow/providers/amazon/aws/utils/eks_get_token.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import argparse
 import json
diff --git a/airflow/providers/amazon/aws/utils/emailer.py b/airflow/providers/amazon/aws/utils/emailer.py
index 7f0356a3a8..ac774f0393 100644
--- a/airflow/providers/amazon/aws/utils/emailer.py
+++ b/airflow/providers/amazon/aws/utils/emailer.py
@@ -16,23 +16,25 @@
 # specific language governing permissions and limitations
 # under the License.
 """Airflow module for email backend using AWS SES"""
-from typing import Any, Dict, List, Optional, Union
+from __future__ import annotations
+
+from typing import Any
 
 from airflow.providers.amazon.aws.hooks.ses import SesHook
 
 
 def send_email(
-    to: Union[List[str], str],
+    to: list[str] | str,
     subject: str,
     html_content: str,
-    files: Optional[List] = None,
-    cc: Optional[Union[List[str], str]] = None,
-    bcc: Optional[Union[List[str], str]] = None,
+    files: list | None = None,
+    cc: list[str] | str | None = None,
+    bcc: list[str] | str | None = None,
     mime_subtype: str = 'mixed',
     mime_charset: str = 'utf-8',
     conn_id: str = 'aws_default',
-    from_email: Optional[str] = None,
-    custom_headers: Optional[Dict[str, Any]] = None,
+    from_email: str | None = None,
+    custom_headers: dict[str, Any] | None = None,
     **kwargs,
 ) -> None:
     """Email backend for SES."""
diff --git a/airflow/providers/amazon/aws/utils/rds.py b/airflow/providers/amazon/aws/utils/rds.py
index 154f65b556..873f2cf83e 100644
--- a/airflow/providers/amazon/aws/utils/rds.py
+++ b/airflow/providers/amazon/aws/utils/rds.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 from enum import Enum
 
diff --git a/airflow/providers/amazon/aws/utils/redshift.py b/airflow/providers/amazon/aws/utils/redshift.py
index bb64c9b46f..d931cb0474 100644
--- a/airflow/providers/amazon/aws/utils/redshift.py
+++ b/airflow/providers/amazon/aws/utils/redshift.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+from __future__ import annotations
 
 import logging
 
diff --git a/airflow/providers/apache/beam/hooks/beam.py b/airflow/providers/apache/beam/hooks/beam.py
index 0644e02b62..e1191073c6 100644
--- a/airflow/providers/apache/beam/hooks/beam.py
+++ b/airflow/providers/apache/beam/hooks/beam.py
@@ -16,6 +16,8 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains a Apache Beam Hook."""
+from __future__ import annotations
+
 import json
 import os
 import select
@@ -24,7 +26,7 @@ import shutil
 import subprocess
 import textwrap
 from tempfile import TemporaryDirectory
-from typing import Callable, List, Optional
+from typing import Callable
 
 from airflow.exceptions import AirflowConfigException, AirflowException
 from airflow.hooks.base import BaseHook
@@ -50,7 +52,7 @@ class BeamRunnerType:
     Twister2Runner = "Twister2Runner"
 
 
-def beam_options_to_args(options: dict) -> List[str]:
+def beam_options_to_args(options: dict) -> list[str]:
     """
     Returns a formatted pipeline options from a dictionary of arguments
 
@@ -65,7 +67,7 @@ def beam_options_to_args(options: dict) -> List[str]:
     if not options:
         return []
 
-    args: List[str] = []
+    args: list[str] = []
     for attr, value in options.items():
         if value is None or (isinstance(value, bool) and value):
             args.append(f"--{attr}")
@@ -88,14 +90,14 @@ class BeamCommandRunner(LoggingMixin):
 
     def __init__(
         self,
-        cmd: List[str],
-        process_line_callback: Optional[Callable[[str], None]] = None,
-        working_directory: Optional[str] = None,
+        cmd: list[str],
+        process_line_callback: Callable[[str], None] | None = None,
+        working_directory: str | None = None,
     ) -> None:
         super().__init__()
         self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd))
         self.process_line_callback = process_line_callback
-        self.job_id: Optional[str] = None
+        self.job_id: str | None = None
 
         self._proc = subprocess.Popen(
             cmd,
@@ -173,9 +175,9 @@ class BeamHook(BaseHook):
     def _start_pipeline(
         self,
         variables: dict,
-        command_prefix: List[str],
-        process_line_callback: Optional[Callable[[str], None]] = None,
-        working_directory: Optional[str] = None,
+        command_prefix: list[str],
+        process_line_callback: Callable[[str], None] | None = None,
+        working_directory: str | None = None,
     ) -> None:
         cmd = command_prefix + [
             f"--runner={self.runner}",
@@ -193,11 +195,11 @@ class BeamHook(BaseHook):
         self,
         variables: dict,
         py_file: str,
-        py_options: List[str],
+        py_options: list[str],
         py_interpreter: str = "python3",
-        py_requirements: Optional[List[str]] = None,
+        py_requirements: list[str] | None = None,
         py_system_site_packages: bool = False,
-        process_line_callback: Optional[Callable[[str], None]] = None,
+        process_line_callback: Callable[[str], None] | None = None,
     ):
         """
         Starts Apache Beam python pipeline.
@@ -266,8 +268,8 @@ class BeamHook(BaseHook):
         self,
         variables: dict,
         jar: str,
-        job_class: Optional[str] = None,
-        process_line_callback: Optional[Callable[[str], None]] = None,
+        job_class: str | None = None,
+        process_line_callback: Callable[[str], None] | None = None,
     ) -> None:
         """
         Starts Apache Beam Java pipeline.
@@ -292,7 +294,7 @@ class BeamHook(BaseHook):
         self,
         variables: dict,
         go_file: str,
-        process_line_callback: Optional[Callable[[str], None]] = None,
+        process_line_callback: Callable[[str], None] | None = None,
         should_init_module: bool = False,
     ) -> None:
         """
diff --git a/airflow/providers/apache/beam/operators/beam.py b/airflow/providers/apache/beam/operators/beam.py
index 2ee6d180de..5d29805c32 100644
--- a/airflow/providers/apache/beam/operators/beam.py
+++ b/airflow/providers/apache/beam/operators/beam.py
@@ -16,11 +16,13 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains Apache Beam operators."""
+from __future__ import annotations
+
 import copy
 import tempfile
 from abc import ABC, ABCMeta
 from contextlib import ExitStack
-from typing import TYPE_CHECKING, Callable, List, Optional, Sequence, Tuple, Union
+from typing import TYPE_CHECKING, Callable, Sequence
 
 from airflow import AirflowException
 from airflow.models import BaseOperator
@@ -47,17 +49,17 @@ class BeamDataflowMixin(metaclass=ABCMeta):
     :class:`~airflow.providers.apache.beam.operators.beam.BeamRunGoPipelineOperator`.
     """
 
-    dataflow_hook: Optional[DataflowHook]
+    dataflow_hook: DataflowHook | None
     dataflow_config: DataflowConfiguration
     gcp_conn_id: str
-    delegate_to: Optional[str]
+    delegate_to: str | None
     dataflow_support_impersonation: bool = True
 
     def _set_dataflow(
         self,
         pipeline_options: dict,
-        job_name_variable_key: Optional[str] = None,
-    ) -> Tuple[str, dict, Callable[[str], None]]:
+        job_name_variable_key: str | None = None,
+    ) -> tuple[str, dict, Callable[[str], None]]:
         self.dataflow_hook = self.__set_dataflow_hook()
         self.dataflow_config.project_id = self.dataflow_config.project_id or self.dataflow_hook.project_id
         dataflow_job_name = self.__get_dataflow_job_name()
@@ -85,7 +87,7 @@ class BeamDataflowMixin(metaclass=ABCMeta):
         )
 
     def __get_dataflow_pipeline_options(
-        self, pipeline_options: dict, job_name: str, job_name_key: Optional[str] = None
+        self, pipeline_options: dict, job_name: str, job_name_key: str | None = None
     ) -> dict:
         pipeline_options = copy.deepcopy(pipeline_options)
         if job_name_key is not None:
@@ -151,11 +153,11 @@ class BeamBasePipelineOperator(BaseOperator, BeamDataflowMixin, ABC):
         self,
         *,
         runner: str = "DirectRunner",
-        default_pipeline_options: Optional[dict] = None,
-        pipeline_options: Optional[dict] = None,
+        default_pipeline_options: dict | None = None,
+        pipeline_options: dict | None = None,
         gcp_conn_id: str = "google_cloud_default",
-        delegate_to: Optional[str] = None,
-        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        delegate_to: str | None = None,
+        dataflow_config: DataflowConfiguration | dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -168,9 +170,9 @@ class BeamBasePipelineOperator(BaseOperator, BeamDataflowMixin, ABC):
             self.dataflow_config = DataflowConfiguration(**dataflow_config)
         else:
             self.dataflow_config = dataflow_config or DataflowConfiguration()
-        self.beam_hook: Optional[BeamHook] = None
-        self.dataflow_hook: Optional[DataflowHook] = None
-        self.dataflow_job_id: Optional[str] = None
+        self.beam_hook: BeamHook | None = None
+        self.dataflow_hook: DataflowHook | None = None
+        self.dataflow_job_id: str | None = None
 
         if self.dataflow_config and self.runner.lower() != BeamRunnerType.DataflowRunner.lower():
             self.log.warning(
@@ -180,13 +182,13 @@ class BeamBasePipelineOperator(BaseOperator, BeamDataflowMixin, ABC):
     def _init_pipeline_options(
         self,
         format_pipeline_options: bool = False,
-        job_name_variable_key: Optional[str] = None,
-    ) -> Tuple[bool, Optional[str], dict, Optional[Callable[[str], None]]]:
+        job_name_variable_key: str | None = None,
+    ) -> tuple[bool, str | None, dict, Callable[[str], None] | None]:
         self.beam_hook = BeamHook(runner=self.runner)
         pipeline_options = self.default_pipeline_options.copy()
-        process_line_callback: Optional[Callable[[str], None]] = None
+        process_line_callback: Callable[[str], None] | None = None
         is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
-        dataflow_job_name: Optional[str] = None
+        dataflow_job_name: str | None = None
         if is_dataflow:
             dataflow_job_name, pipeline_options, process_line_callback = self._set_dataflow(
                 pipeline_options=pipeline_options,
@@ -255,15 +257,15 @@ class BeamRunPythonPipelineOperator(BeamBasePipelineOperator):
         *,
         py_file: str,
         runner: str = "DirectRunner",
-        default_pipeline_options: Optional[dict] = None,
-        pipeline_options: Optional[dict] = None,
+        default_pipeline_options: dict | None = None,
+        pipeline_options: dict | None = None,
         py_interpreter: str = "python3",
-        py_options: Optional[List[str]] = None,
-        py_requirements: Optional[List[str]] = None,
+        py_options: list[str] | None = None,
+        py_requirements: list[str] | None = None,
         py_system_site_packages: bool = False,
         gcp_conn_id: str = "google_cloud_default",
-        delegate_to: Optional[str] = None,
-        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        delegate_to: str | None = None,
+        dataflow_config: DataflowConfiguration | dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(
@@ -285,7 +287,7 @@ class BeamRunPythonPipelineOperator(BeamBasePipelineOperator):
             {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
         )
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """Execute the Apache Beam Pipeline."""
         (
             is_dataflow,
@@ -396,12 +398,12 @@ class BeamRunJavaPipelineOperator(BeamBasePipelineOperator):
         *,
         jar: str,
         runner: str = "DirectRunner",
-        job_class: Optional[str] = None,
-        default_pipeline_options: Optional[dict] = None,
-        pipeline_options: Optional[dict] = None,
+        job_class: str | None = None,
+        default_pipeline_options: dict | None = None,
+        pipeline_options: dict | None = None,
         gcp_conn_id: str = "google_cloud_default",
-        delegate_to: Optional[str] = None,
-        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        delegate_to: str | None = None,
+        dataflow_config: DataflowConfiguration | dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(
@@ -416,7 +418,7 @@ class BeamRunJavaPipelineOperator(BeamBasePipelineOperator):
         self.jar = jar
         self.job_class = job_class
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """Execute the Apache Beam Pipeline."""
         (
             is_dataflow,
@@ -537,11 +539,11 @@ class BeamRunGoPipelineOperator(BeamBasePipelineOperator):
         *,
         go_file: str,
         runner: str = "DirectRunner",
-        default_pipeline_options: Optional[dict] = None,
-        pipeline_options: Optional[dict] = None,
+        default_pipeline_options: dict | None = None,
+        pipeline_options: dict | None = None,
         gcp_conn_id: str = "google_cloud_default",
-        delegate_to: Optional[str] = None,
-        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        delegate_to: str | None = None,
+        dataflow_config: DataflowConfiguration | dict | None = None,
         **kwargs,
     ) -> None:
         super().__init__(
@@ -567,7 +569,7 @@ class BeamRunGoPipelineOperator(BeamBasePipelineOperator):
             {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
         )
 
-    def execute(self, context: 'Context'):
+    def execute(self, context: Context):
         """Execute the Apache Beam Pipeline."""
         (
             is_dataflow,
diff --git a/airflow/providers/apache/cassandra/hooks/cassandra.py b/airflow/providers/apache/cassandra/hooks/cassandra.py
index 71c360ec47..c0d3484f18 100644
--- a/airflow/providers/apache/cassandra/hooks/cassandra.py
+++ b/airflow/providers/apache/cassandra/hooks/cassandra.py
@@ -15,10 +15,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 """This module contains hook to integrate with Apache Cassandra."""
+from __future__ import annotations
 
-from typing import Any, Dict, Union
+from typing import Any, Union
 
 from cassandra.auth import PlainTextAuthProvider
 from cassandra.cluster import Cluster, Session
@@ -141,7 +141,7 @@ class CassandraHook(BaseHook, LoggingMixin):
             self.cluster.shutdown()
 
     @staticmethod
-    def get_lb_policy(policy_name: str, policy_args: Dict[str, Any]) -> Policy:
+    def get_lb_policy(policy_name: str, policy_args: dict[str, Any]) -> Policy:
         """
         Creates load balancing policy.
 
@@ -188,7 +188,7 @@ class CassandraHook(BaseHook, LoggingMixin):
         cluster_metadata = self.get_conn().cluster.metadata
         return keyspace in cluster_metadata.keyspaces and table in cluster_metadata.keyspaces[keyspace].tables
 
-    def record_exists(self, table: str, keys: Dict[str, str]) -> bool:
+    def record_exists(self, table: str, keys: dict[str, str]) -> bool:
         """
         Checks if a record exists in Cassandra
 
diff --git a/airflow/providers/apache/cassandra/sensors/record.py b/airflow/providers/apache/cassandra/sensors/record.py
index f0a407297a..f90283d61e 100644
--- a/airflow/providers/apache/cassandra/sensors/record.py
+++ b/airflow/providers/apache/cassandra/sensors/record.py
@@ -19,8 +19,9 @@
 This module contains sensor that check the existence
 of a record in a Cassandra cluster.
 """
+from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Dict, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook
 from airflow.sensors.base import BaseSensorOperator
@@ -58,7 +59,7 @@ class CassandraRecordSensor(BaseSensorOperator):
     def __init__(
         self,
         *,
... 54050 lines suppressed ...