Posted to commits@airflow.apache.org by jh...@apache.org on 2021/07/08 18:21:50 UTC

[airflow] branch v2-1-test updated (83863ba -> 6df6369)

This is an automated email from the ASF dual-hosted git repository.

jhtimmins pushed a change to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


    from 83863ba  Update release date and changelog for 2.1.2.
     new 826b5c5  Add Python 3.9 support (#15515) (#16883)
     new e245360  Introduce compat shim airflow.compat.functools (#15969)
     new 6df6369  Removes pylint from our toolchain (#16682)

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .dockerignore                                      |   2 -
 .github/boring-cyborg.yml                          |   2 +-
 .github/workflows/ci.yml                           |  73 +--
 .pre-commit-config.yaml                            |  63 +--
 .rat-excludes                                      |   3 -
 BREEZE.rst                                         |  44 +-
 CI.rst                                             |   8 +-
 CONTRIBUTING.rst                                   |   4 +-
 CONTRIBUTORS_QUICK_START.rst                       |  16 +-
 Dockerfile                                         |   7 +-
 Dockerfile.ci                                      |   8 +-
 IMAGES.rst                                         |   2 +-
 LOCAL_VIRTUALENV.rst                               |   6 +-
 PULL_REQUEST_WORKFLOW.rst                          |   8 +-
 README.md                                          |   4 +-
 STATIC_CODE_CHECKS.rst                             | 116 +---
 airflow/__init__.py                                |  13 +-
 airflow/api/auth/backend/basic_auth.py             |   2 +-
 airflow/api/auth/backend/default.py                |   2 +-
 airflow/api/auth/backend/deny_all.py               |   4 +-
 airflow/api/auth/backend/kerberos_auth.py          |   6 +-
 airflow/api/client/json_client.py                  |   4 +-
 airflow/api/common/experimental/delete_dag.py      |   2 +-
 airflow/api/common/experimental/mark_tasks.py      |   9 +-
 airflow/api/common/experimental/trigger_dag.py     |   2 +-
 .../api_connexion/endpoints/dag_run_endpoint.py    |   4 +-
 airflow/api_connexion/endpoints/health_endpoint.py |   2 +-
 .../endpoints/task_instance_endpoint.py            |   2 +-
 airflow/api_connexion/parameters.py                |   2 +-
 airflow/api_connexion/schemas/common_schema.py     |   4 +-
 airflow/api_connexion/schemas/connection_schema.py |   2 +-
 airflow/api_connexion/schemas/error_schema.py      |   2 +-
 airflow/api_connexion/security.py                  |   2 +-
 airflow/cli/cli_parser.py                          |  13 +-
 airflow/cli/commands/celery_command.py             |   2 +-
 airflow/cli/commands/connection_command.py         |   3 +-
 airflow/cli/commands/info_command.py               |   2 +-
 airflow/cli/commands/kubernetes_command.py         |   7 +-
 airflow/cli/commands/plugins_command.py            |   2 +-
 airflow/cli/commands/pool_command.py               |   2 +-
 airflow/cli/commands/role_command.py               |   4 +-
 airflow/cli/commands/sync_perm_command.py          |   4 +-
 airflow/cli/commands/task_command.py               |   4 +-
 airflow/cli/commands/user_command.py               |  19 +-
 airflow/cli/commands/variable_command.py           |   4 +-
 airflow/cli/commands/webserver_command.py          |   6 +-
 airflow/cli/simple_table.py                        |   5 +-
 .../zendesk/hooks => airflow/compat}/__init__.py   |   0
 .../compat/functools.py                            |  20 +-
 airflow/configuration.py                           |  31 +-
 airflow/contrib/hooks/aws_athena_hook.py           |   1 -
 airflow/contrib/hooks/aws_datasync_hook.py         |   1 -
 airflow/contrib/hooks/aws_dynamodb_hook.py         |   1 -
 airflow/contrib/hooks/aws_firehose_hook.py         |   1 -
 airflow/contrib/hooks/aws_glue_catalog_hook.py     |   1 -
 airflow/contrib/hooks/aws_hook.py                  |   1 -
 airflow/contrib/hooks/aws_lambda_hook.py           |   1 -
 airflow/contrib/hooks/aws_logs_hook.py             |   1 -
 airflow/contrib/hooks/aws_sns_hook.py              |   1 -
 airflow/contrib/hooks/aws_sqs_hook.py              |   1 -
 .../contrib/hooks/azure_container_instance_hook.py |   1 -
 .../contrib/hooks/azure_container_registry_hook.py |   1 -
 .../contrib/hooks/azure_container_volume_hook.py   |   1 -
 airflow/contrib/hooks/azure_cosmos_hook.py         |   1 -
 airflow/contrib/hooks/azure_data_lake_hook.py      |   1 -
 airflow/contrib/hooks/azure_fileshare_hook.py      |   1 -
 airflow/contrib/hooks/bigquery_hook.py             |   1 -
 airflow/contrib/hooks/cassandra_hook.py            |   1 -
 airflow/contrib/hooks/cloudant_hook.py             |   1 -
 airflow/contrib/hooks/databricks_hook.py           |   1 -
 airflow/contrib/hooks/datadog_hook.py              |   1 -
 airflow/contrib/hooks/datastore_hook.py            |   1 -
 airflow/contrib/hooks/dingding_hook.py             |   1 -
 airflow/contrib/hooks/discord_webhook_hook.py      |   1 -
 airflow/contrib/hooks/emr_hook.py                  |   1 -
 airflow/contrib/hooks/fs_hook.py                   |   1 -
 airflow/contrib/hooks/ftp_hook.py                  |   1 -
 airflow/contrib/hooks/gcp_bigtable_hook.py         |   1 -
 airflow/contrib/hooks/gcp_cloud_build_hook.py      |   1 -
 airflow/contrib/hooks/gcp_dlp_hook.py              |   1 -
 airflow/contrib/hooks/gcp_mlengine_hook.py         |   1 -
 airflow/contrib/hooks/gcp_natural_language_hook.py |   1 -
 airflow/contrib/hooks/gcp_pubsub_hook.py           |   1 -
 airflow/contrib/hooks/gcp_tasks_hook.py            |   1 -
 airflow/contrib/hooks/gcp_translate_hook.py        |   1 -
 .../contrib/hooks/gcp_video_intelligence_hook.py   |   1 -
 airflow/contrib/hooks/gcp_vision_hook.py           |   1 -
 airflow/contrib/hooks/gdrive_hook.py               |   1 -
 airflow/contrib/hooks/grpc_hook.py                 |   1 -
 airflow/contrib/hooks/imap_hook.py                 |   1 -
 airflow/contrib/hooks/jenkins_hook.py              |   1 -
 airflow/contrib/hooks/jira_hook.py                 |   1 -
 airflow/contrib/hooks/mongo_hook.py                |   1 -
 airflow/contrib/hooks/openfaas_hook.py             |   1 -
 airflow/contrib/hooks/opsgenie_alert_hook.py       |   1 -
 airflow/contrib/hooks/pagerduty_hook.py            |   1 -
 airflow/contrib/hooks/pinot_hook.py                |   1 -
 airflow/contrib/hooks/qubole_check_hook.py         |   1 -
 airflow/contrib/hooks/qubole_hook.py               |   1 -
 airflow/contrib/hooks/redis_hook.py                |   1 -
 airflow/contrib/hooks/redshift_hook.py             |   1 -
 airflow/contrib/hooks/sagemaker_hook.py            |   1 -
 airflow/contrib/hooks/salesforce_hook.py           |   1 -
 airflow/contrib/hooks/segment_hook.py              |   1 -
 airflow/contrib/hooks/sftp_hook.py                 |   1 -
 airflow/contrib/hooks/slack_webhook_hook.py        |   1 -
 airflow/contrib/hooks/snowflake_hook.py            |   1 -
 airflow/contrib/hooks/spark_jdbc_hook.py           |   1 -
 airflow/contrib/hooks/spark_sql_hook.py            |   1 -
 airflow/contrib/hooks/spark_submit_hook.py         |   1 -
 airflow/contrib/hooks/sqoop_hook.py                |   1 -
 airflow/contrib/hooks/ssh_hook.py                  |   1 -
 airflow/contrib/hooks/vertica_hook.py              |   1 -
 airflow/contrib/hooks/wasb_hook.py                 |   1 -
 airflow/contrib/hooks/winrm_hook.py                |   1 -
 airflow/contrib/operators/adls_list_operator.py    |   1 -
 airflow/contrib/operators/aws_athena_operator.py   |   1 -
 .../contrib/operators/aws_sqs_publish_operator.py  |   1 -
 airflow/contrib/operators/awsbatch_operator.py     |   1 -
 .../azure_container_instances_operator.py          |   1 -
 airflow/contrib/operators/azure_cosmos_operator.py |   1 -
 .../contrib/operators/bigquery_check_operator.py   |   1 -
 airflow/contrib/operators/bigquery_get_data.py     |   1 -
 airflow/contrib/operators/bigquery_operator.py     |   1 -
 airflow/contrib/operators/bigquery_to_bigquery.py  |   1 -
 .../operators/bigquery_to_mysql_operator.py        |   1 -
 airflow/contrib/operators/databricks_operator.py   |   1 -
 airflow/contrib/operators/dingding_operator.py     |   1 -
 .../contrib/operators/discord_webhook_operator.py  |   1 -
 airflow/contrib/operators/docker_swarm_operator.py |   1 -
 airflow/contrib/operators/druid_operator.py        |   1 -
 airflow/contrib/operators/dynamodb_to_s3.py        |   1 -
 airflow/contrib/operators/ecs_operator.py          |   1 -
 .../contrib/operators/emr_add_steps_operator.py    |   1 -
 .../operators/emr_create_job_flow_operator.py      |   1 -
 .../operators/emr_terminate_job_flow_operator.py   |   1 -
 airflow/contrib/operators/file_to_wasb.py          |   1 -
 airflow/contrib/operators/gcp_bigtable_operator.py |   1 -
 .../contrib/operators/gcp_cloud_build_operator.py  |   1 -
 airflow/contrib/operators/gcp_dlp_operator.py      |   1 -
 airflow/contrib/operators/gcp_tasks_operator.py    |   1 -
 .../contrib/operators/gcp_translate_operator.py    |   1 -
 .../operators/gcp_translate_speech_operator.py     |   3 +-
 .../operators/gcp_video_intelligence_operator.py   |   1 -
 airflow/contrib/operators/gcp_vision_operator.py   |   2 +-
 .../operators/gcs_to_gcs_transfer_operator.py      |   2 -
 .../contrib/operators/gcs_to_gdrive_operator.py    |   1 -
 airflow/contrib/operators/grpc_operator.py         |   1 -
 airflow/contrib/operators/hive_to_dynamodb.py      |   1 -
 .../operators/imap_attachment_to_s3_operator.py    |   1 -
 .../operators/jenkins_job_trigger_operator.py      |   1 -
 airflow/contrib/operators/jira_operator.py         |   1 -
 .../contrib/operators/kubernetes_pod_operator.py   |   1 -
 airflow/contrib/operators/mongo_to_s3.py           |   1 -
 .../contrib/operators/opsgenie_alert_operator.py   |   1 -
 .../oracle_to_azure_data_lake_transfer.py          |   1 -
 .../contrib/operators/oracle_to_oracle_transfer.py |   1 -
 airflow/contrib/operators/qubole_check_operator.py |   1 -
 airflow/contrib/operators/qubole_operator.py       |   1 -
 .../contrib/operators/redis_publish_operator.py    |   1 -
 .../contrib/operators/s3_copy_object_operator.py   |   1 -
 .../operators/s3_delete_objects_operator.py        |   1 -
 airflow/contrib/operators/s3_list_operator.py      |   1 -
 airflow/contrib/operators/s3_to_gcs_operator.py    |   1 -
 .../operators/s3_to_gcs_transfer_operator.py       |   1 -
 airflow/contrib/operators/s3_to_sftp_operator.py   |   1 -
 .../contrib/operators/sagemaker_base_operator.py   |   1 -
 .../sagemaker_endpoint_config_operator.py          |   1 -
 .../operators/sagemaker_endpoint_operator.py       |   1 -
 .../contrib/operators/sagemaker_model_operator.py  |   1 -
 .../operators/sagemaker_training_operator.py       |   1 -
 .../operators/sagemaker_transform_operator.py      |   1 -
 .../contrib/operators/sagemaker_tuning_operator.py |   1 -
 .../operators/segment_track_event_operator.py      |   1 -
 airflow/contrib/operators/sftp_operator.py         |   1 -
 airflow/contrib/operators/sftp_to_s3_operator.py   |   1 -
 .../contrib/operators/slack_webhook_operator.py    |   1 -
 airflow/contrib/operators/snowflake_operator.py    |   1 -
 airflow/contrib/operators/sns_publish_operator.py  |   1 -
 airflow/contrib/operators/spark_jdbc_operator.py   |   1 -
 airflow/contrib/operators/spark_sql_operator.py    |   1 -
 airflow/contrib/operators/spark_submit_operator.py |   1 -
 airflow/contrib/operators/sqoop_operator.py        |   1 -
 airflow/contrib/operators/ssh_operator.py          |   1 -
 airflow/contrib/operators/vertica_operator.py      |   1 -
 airflow/contrib/operators/vertica_to_mysql.py      |   1 -
 .../contrib/operators/wasb_delete_blob_operator.py |   1 -
 airflow/contrib/operators/winrm_operator.py        |   1 -
 airflow/contrib/secrets/aws_secrets_manager.py     |   1 -
 airflow/contrib/secrets/aws_systems_manager.py     |   1 -
 airflow/contrib/secrets/azure_key_vault.py         |   1 -
 airflow/contrib/secrets/gcp_secrets_manager.py     |   1 -
 airflow/contrib/secrets/hashicorp_vault.py         |   1 -
 airflow/contrib/sensors/aws_athena_sensor.py       |   1 -
 .../sensors/aws_glue_catalog_partition_sensor.py   |   1 -
 .../contrib/sensors/aws_redshift_cluster_sensor.py |   1 -
 airflow/contrib/sensors/aws_sqs_sensor.py          |   1 -
 airflow/contrib/sensors/azure_cosmos_sensor.py     |   1 -
 airflow/contrib/sensors/bash_sensor.py             |   1 -
 airflow/contrib/sensors/cassandra_record_sensor.py |   1 -
 airflow/contrib/sensors/cassandra_table_sensor.py  |   1 -
 airflow/contrib/sensors/celery_queue_sensor.py     |   1 -
 airflow/contrib/sensors/datadog_sensor.py          |   1 -
 airflow/contrib/sensors/emr_base_sensor.py         |   1 -
 airflow/contrib/sensors/emr_job_flow_sensor.py     |   1 -
 airflow/contrib/sensors/emr_step_sensor.py         |   1 -
 airflow/contrib/sensors/file_sensor.py             |   1 -
 airflow/contrib/sensors/ftp_sensor.py              |   1 -
 airflow/contrib/sensors/hdfs_sensor.py             |   3 -
 airflow/contrib/sensors/imap_attachment_sensor.py  |   1 -
 airflow/contrib/sensors/jira_sensor.py             |   1 -
 airflow/contrib/sensors/mongo_sensor.py            |   1 -
 airflow/contrib/sensors/pubsub_sensor.py           |   1 -
 airflow/contrib/sensors/python_sensor.py           |   1 -
 airflow/contrib/sensors/qubole_sensor.py           |   1 -
 airflow/contrib/sensors/redis_key_sensor.py        |   1 -
 airflow/contrib/sensors/redis_pub_sub_sensor.py    |   1 -
 airflow/contrib/sensors/sagemaker_base_sensor.py   |   1 -
 .../contrib/sensors/sagemaker_endpoint_sensor.py   |   1 -
 .../contrib/sensors/sagemaker_training_sensor.py   |   1 -
 .../contrib/sensors/sagemaker_transform_sensor.py  |   1 -
 airflow/contrib/sensors/sagemaker_tuning_sensor.py |   1 -
 airflow/contrib/sensors/sftp_sensor.py             |   1 -
 airflow/contrib/sensors/wasb_sensor.py             |   1 -
 airflow/contrib/sensors/weekday_sensor.py          |   1 -
 airflow/contrib/task_runner/cgroup_task_runner.py  |   1 -
 airflow/contrib/utils/gcp_field_sanitizer.py       |   1 -
 airflow/contrib/utils/gcp_field_validator.py       |   1 -
 .../log/task_handler_with_custom_formatter.py      |   1 -
 airflow/contrib/utils/mlengine_operator_utils.py   |   1 -
 .../contrib/utils/mlengine_prediction_summary.py   |   1 -
 airflow/contrib/utils/weekday.py                   |   1 -
 airflow/decorators/__init__.py                     |   4 +-
 airflow/decorators/base.py                         |   2 +-
 airflow/decorators/python.py                       |   2 +-
 airflow/decorators/python_virtualenv.py            |   2 +-
 airflow/decorators/task_group.py                   |   2 +-
 airflow/example_dags/example_branch_labels.py      |   4 +-
 airflow/example_dags/example_branch_operator.py    |   2 +-
 airflow/example_dags/libs/helper.py                |   3 +-
 airflow/example_dags/tutorial_etl_dag.py           |   1 -
 airflow/example_dags/tutorial_taskflow_api_etl.py  |   1 -
 .../tutorial_taskflow_api_etl_virtualenv.py        |   1 -
 airflow/executors/celery_executor.py               |  26 +-
 airflow/executors/debug_executor.py                |  12 +-
 airflow/executors/kubernetes_executor.py           |  18 +-
 airflow/executors/local_executor.py                |  21 +-
 airflow/hooks/S3_hook.py                           |   3 +-
 airflow/hooks/base_hook.py                         |   1 -
 airflow/hooks/dbapi.py                             |   2 +-
 airflow/hooks/dbapi_hook.py                        |   1 -
 airflow/hooks/docker_hook.py                       |   1 -
 airflow/hooks/druid_hook.py                        |   1 -
 airflow/hooks/hdfs_hook.py                         |   1 -
 airflow/hooks/hive_hooks.py                        |   1 -
 airflow/hooks/http_hook.py                         |   1 -
 airflow/hooks/jdbc_hook.py                         |   1 -
 airflow/hooks/mssql_hook.py                        |   1 -
 airflow/hooks/mysql_hook.py                        |   1 -
 airflow/hooks/oracle_hook.py                       |   1 -
 airflow/hooks/pig_hook.py                          |   1 -
 airflow/hooks/postgres_hook.py                     |   1 -
 airflow/hooks/presto_hook.py                       |   1 -
 airflow/hooks/samba_hook.py                        |   1 -
 airflow/hooks/slack_hook.py                        |   1 -
 airflow/hooks/sqlite_hook.py                       |   1 -
 airflow/hooks/subprocess.py                        |   3 +-
 airflow/hooks/webhdfs_hook.py                      |   1 -
 airflow/hooks/zendesk_hook.py                      |   1 -
 airflow/jobs/__init__.py                           |   6 +-
 airflow/jobs/backfill_job.py                       |  13 +-
 airflow/jobs/base_job.py                           |   2 +-
 airflow/jobs/local_task_job.py                     |  10 +-
 airflow/jobs/scheduler_job.py                      |  25 +-
 airflow/kubernetes/kube_client.py                  |   7 +-
 airflow/kubernetes/kube_config.py                  |   4 +-
 airflow/kubernetes/pod.py                          |   7 +-
 airflow/kubernetes/pod_generator.py                |  11 +-
 airflow/kubernetes/pod_generator_deprecated.py     |   2 +-
 airflow/kubernetes/pod_launcher.py                 |   4 +-
 airflow/kubernetes/pod_runtime_info_env.py         |   2 +-
 airflow/kubernetes/refresh_config.py               |   8 +-
 airflow/kubernetes/volume.py                       |   2 +-
 airflow/kubernetes/volume_mount.py                 |   2 +-
 airflow/lineage/__init__.py                        |   4 +-
 airflow/lineage/backend.py                         |   2 +-
 airflow/lineage/entities.py                        |   5 +-
 airflow/logging_config.py                          |   2 +-
 airflow/migrations/env.py                          |   3 +-
 .../versions/03bc53e68815_add_sm_dag_index.py      |   4 +-
 .../versions/05f30312d566_merge_heads.py           |   4 +-
 .../0a2a5b66e19d_add_task_reschedule_table.py      |  10 +-
 .../0e2a74e0fc9f_add_time_zone_awareness.py        |   4 +-
 ...dfa7_add_dag_id_state_index_on_dag_run_table.py |   4 +-
 .../versions/13eb55f81627_for_compatibility.py     |   4 +-
 .../versions/1507a7289a2f_create_is_encrypted.py   |   4 +-
 ...cfc09e3_add_is_encrypted_column_to_variable_.py |   4 +-
 .../migrations/versions/1b38cef5b76e_add_dagrun.py |   4 +-
 .../versions/211e584da130_add_ti_state_index.py    |   4 +-
 ...30d7c24_add_executor_config_to_task_instance.py |   4 +-
 .../versions/2e541a1dcfed_task_duration.py         |   4 +-
 .../versions/2e82aab8ef20_rename_user_table.py     |   4 +-
 ...338e90f54d61_more_logging_into_task_isntance.py |   4 +-
 ...7a1ff4_add_kubernetes_resource_checkpointing.py |   4 +-
 .../versions/40e67319e3a9_dagrun_config.py         |   4 +-
 .../versions/41f5f12752f8_add_superuser_field.py   |   4 +-
 .../versions/4446e08588_dagrun_start_end.py        |   4 +-
 ...236f1_add_fractional_seconds_to_mysql_tables.py |   8 +-
 .../versions/502898887f84_adding_extra_to_log.py   |   4 +-
 ...0_fix_mssql_exec_date_rendered_task_instance.py |   8 +-
 .../versions/52d714495f0_job_id_indices.py         |   4 +-
 .../561833c1c74b_add_password_column_to_user.py    |   4 +-
 ...73d9401f_add_description_field_to_connection.py |   2 +-
 ...7aae_fix_description_field_in_connection_to_.py |   8 +-
 .../64de9cddf6c9_add_task_fails_journal_table.py   |   4 +-
 .../849da589634d_prefix_dag_permissions.py         |  30 +-
 ...15af_add_rendered_task_instance_fields_table.py |   6 +-
 .../856955da8476_fix_sqlite_foreign_key.py         |   2 +-
 .../8646922c8a04_change_default_pool_slots_to_1.py |   4 +-
 ...0d1215c0_add_kubernetes_scheduler_uniqueness.py |   4 +-
 ...1e647c8_task_reschedule_fk_on_cascade_delete.py |   4 +-
 .../versions/947454bf1dff_add_ti_job_id_index.py   |   4 +-
 .../versions/952da73b5eff_add_dag_code_table.py    |   2 +-
 .../versions/9635ae0956e7_index_faskfail.py        |   4 +-
 ...606e2_add_scheduling_decision_to_dagrun_and_.py |   4 +-
 ...67d16b_add_pool_slots_field_to_task_instance.py |   4 +-
 .../versions/b0125267960b_merge_heads.py           |   4 +-
 ...cfc896_add_a_column_to_track_the_encryption_.py |   4 +-
 ...13e_add_notification_sent_column_to_sla_miss.py |   4 +-
 ...3e6c56_make_xcom_value_column_a_large_binary.py |   4 +-
 .../bf00311e1990_add_index_to_taskinstance.py      |   4 +-
 .../versions/c8ffec048a3b_add_fields_to_dag.py     |   4 +-
 ...623dc7_add_max_tries_column_to_task_instance.py |   8 +-
 .../versions/cf5dc11e79ad_drop_user_and_chart.py   |   4 +-
 .../d2ae31099d61_increase_text_size_for_mysql.py   |   8 +-
 .../d38e04c12aa2_add_serialized_dag_table.py       |  12 +-
 .../versions/dd25f486b8ea_add_idx_log_dag.py       |   4 +-
 .../dd4ecb8fbee3_add_schedule_interval_to_dag.py   |   4 +-
 .../e38be357a868_update_schema_for_smart_sensor.py |  10 +-
 .../versions/e3a246e0dc1_current_schema.py         |   4 +-
 ...ac86c_change_field_in_dagcode_to_mediumtext_.py |   8 +-
 .../f23433877c24_fix_mysql_not_null_constraint.py  |   4 +-
 .../versions/f2ca10b85618_add_dag_stats_table.py   |   4 +-
 airflow/models/__init__.py                         |   2 +-
 airflow/models/baseoperator.py                     |  61 +--
 airflow/models/connection.py                       |  14 +-
 airflow/models/crypto.py                           |   4 +-
 airflow/models/dag.py                              |   2 +-
 airflow/models/dagbag.py                           |  10 +-
 airflow/models/dagrun.py                           |   2 +-
 airflow/models/errors.py                           |   2 +-
 airflow/models/pool.py                             |   2 +-
 airflow/models/serialized_dag.py                   |   7 +-
 airflow/models/skipmixin.py                        |   2 +-
 airflow/models/taskinstance.py                     |  39 +-
 airflow/models/variable.py                         |   6 +-
 airflow/models/xcom_arg.py                         |   2 +-
 airflow/mypy/plugin/decorators.py                  |   8 +-
 airflow/operators/bash.py                          |   6 +-
 airflow/operators/bash_operator.py                 |   1 -
 airflow/operators/branch_operator.py               |   1 -
 airflow/operators/dagrun_operator.py               |   1 -
 airflow/operators/docker_operator.py               |   1 -
 airflow/operators/druid_check_operator.py          |   1 -
 airflow/operators/dummy_operator.py                |   1 -
 airflow/operators/email.py                         |   6 +-
 airflow/operators/email_operator.py                |   1 -
 airflow/operators/gcs_to_s3.py                     |   1 -
 airflow/operators/hive_operator.py                 |   1 -
 airflow/operators/hive_stats_operator.py           |   1 -
 airflow/operators/hive_to_druid.py                 |   1 -
 airflow/operators/hive_to_mysql.py                 |   1 -
 airflow/operators/hive_to_samba_operator.py        |   1 -
 airflow/operators/http_operator.py                 |   1 -
 airflow/operators/jdbc_operator.py                 |   1 -
 airflow/operators/latest_only_operator.py          |   1 -
 airflow/operators/mssql_operator.py                |   1 -
 airflow/operators/mysql_operator.py                |   1 -
 airflow/operators/oracle_operator.py               |   1 -
 airflow/operators/papermill_operator.py            |   1 -
 airflow/operators/pig_operator.py                  |   1 -
 airflow/operators/postgres_operator.py             |   1 -
 airflow/operators/presto_check_operator.py         |   3 +-
 airflow/operators/presto_to_mysql.py               |   1 -
 airflow/operators/python.py                        |   4 +-
 airflow/operators/python_operator.py               |   1 -
 airflow/operators/s3_file_transform_operator.py    |   1 -
 airflow/operators/slack_operator.py                |   1 -
 airflow/operators/sql.py                           |   6 +-
 airflow/operators/sqlite_operator.py               |   1 -
 airflow/operators/subdag_operator.py               |   1 -
 airflow/plugins_manager.py                         |  31 +-
 airflow/provider.yaml.schema.json                  |   7 +
 .../example_google_api_to_s3_transfer_advanced.py  |   3 +-
 airflow/providers/amazon/aws/hooks/athena.py       |   8 +-
 airflow/providers/amazon/aws/hooks/aws_dynamodb.py |   1 -
 airflow/providers/amazon/aws/hooks/base_aws.py     |   4 +-
 airflow/providers/amazon/aws/hooks/batch_client.py |  12 +-
 airflow/providers/amazon/aws/hooks/glue.py         |   2 +-
 airflow/providers/amazon/aws/hooks/glue_crawler.py |   4 +-
 airflow/providers/amazon/aws/hooks/redshift.py     |   2 +-
 airflow/providers/amazon/aws/hooks/s3.py           |   4 +-
 airflow/providers/amazon/aws/hooks/sagemaker.py    |  10 +-
 airflow/providers/amazon/aws/hooks/ses.py          |   2 +-
 .../amazon/aws/log/cloudwatch_task_handler.py      |   4 +-
 .../providers/amazon/aws/log/s3_task_handler.py    |  10 +-
 airflow/providers/amazon/aws/operators/athena.py   |   4 +-
 airflow/providers/amazon/aws/operators/batch.py    |   6 +-
 airflow/providers/amazon/aws/operators/datasync.py |   1 -
 airflow/providers/amazon/aws/operators/ecs.py      |   7 +-
 airflow/providers/amazon/aws/operators/glue.py     |   2 +-
 .../amazon/aws/operators/s3_file_transform.py      |   2 +-
 .../amazon/aws/operators/sagemaker_base.py         |   4 +-
 .../providers/amazon/aws/sensors/sagemaker_base.py |   2 +-
 .../amazon/aws/transfers/dynamodb_to_s3.py         |   4 +-
 .../providers/amazon/aws/transfers/exasol_to_s3.py |   2 +-
 .../providers/amazon/aws/transfers/gcs_to_s3.py    |   2 +-
 .../amazon/aws/transfers/hive_to_dynamodb.py       |   2 +-
 .../providers/amazon/aws/transfers/mongo_to_s3.py  |   1 -
 .../amazon/aws/transfers/redshift_to_s3.py         |   2 +-
 airflow/providers/apache/beam/hooks/beam.py        |   4 +-
 airflow/providers/apache/beam/operators/beam.py    |  15 +-
 .../providers/apache/cassandra/hooks/cassandra.py  |   2 +-
 .../apache/druid/transfers/hive_to_druid.py        |   2 +-
 airflow/providers/apache/hdfs/hooks/hdfs.py        |   2 +-
 airflow/providers/apache/hdfs/hooks/webhdfs.py     |   2 +-
 airflow/providers/apache/hdfs/sensors/hdfs.py      |   2 +-
 airflow/providers/apache/hive/hooks/hive.py        |  10 +-
 airflow/providers/apache/hive/operators/hive.py    |   1 -
 airflow/providers/apache/hive/provider.yaml        |   3 +
 .../apache/hive/transfers/mssql_to_hive.py         |   3 +-
 .../apache/hive/transfers/mysql_to_hive.py         |   2 +-
 .../providers/apache/hive/transfers/s3_to_hive.py  |   4 +-
 .../providers/apache/kylin/operators/kylin_cube.py |   1 -
 airflow/providers/apache/livy/hooks/livy.py        |   3 -
 airflow/providers/apache/livy/operators/livy.py    |   1 -
 airflow/providers/apache/pinot/hooks/pinot.py      |   4 +-
 airflow/providers/apache/spark/hooks/spark_jdbc.py |   2 -
 .../apache/spark/hooks/spark_jdbc_script.py        |   2 -
 airflow/providers/apache/spark/hooks/spark_sql.py  |   3 +-
 .../providers/apache/spark/hooks/spark_submit.py   |   3 -
 .../providers/apache/spark/operators/spark_jdbc.py |   2 -
 .../providers/apache/spark/operators/spark_sql.py  |   1 -
 .../apache/spark/operators/spark_submit.py         |   2 -
 airflow/providers/apache/sqoop/hooks/sqoop.py      |   3 -
 airflow/providers/apache/sqoop/operators/sqoop.py  |   2 -
 airflow/providers/asana/hooks/asana.py             |  16 +-
 .../backcompat/backwards_compat_converters.py      |   2 +-
 .../cncf/kubernetes/operators/kubernetes_pod.py    |   4 +-
 airflow/providers/databricks/hooks/databricks.py   |   2 +-
 .../providers/databricks/operators/databricks.py   |   2 -
 airflow/providers/datadog/hooks/datadog.py         |   1 -
 .../example_dags/example_docker_copy_data.py       |   2 +-
 airflow/providers/docker/operators/docker.py       |   6 +-
 .../providers/elasticsearch/log/es_task_handler.py |  14 +-
 airflow/providers/ftp/hooks/ftp.py                 |   4 +-
 .../cloud/example_dags/example_automl_tables.py    |  28 +-
 .../cloud/example_dags/example_bigquery_dts.py     |   8 +-
 .../google/cloud/example_dags/example_bigtable.py  |   4 +-
 .../cloud/example_dags/example_cloud_build.py      |   4 +-
 .../google/cloud/example_dags/example_cloud_sql.py |  44 +-
 .../google/cloud/example_dags/example_tasks.py     |   2 +-
 .../google/cloud/example_dags/example_vision.py    |  10 +-
 airflow/providers/google/cloud/hooks/automl.py     |   2 +-
 airflow/providers/google/cloud/hooks/bigquery.py   |  39 +-
 airflow/providers/google/cloud/hooks/bigtable.py   |   1 -
 .../providers/google/cloud/hooks/cloud_build.py    |   9 +-
 airflow/providers/google/cloud/hooks/cloud_sql.py  |  37 +-
 .../cloud/hooks/cloud_storage_transfer_service.py  |  35 +-
 airflow/providers/google/cloud/hooks/compute.py    |  21 +-
 .../providers/google/cloud/hooks/compute_ssh.py    |  28 +-
 .../providers/google/cloud/hooks/datacatalog.py    |   4 +-
 airflow/providers/google/cloud/hooks/dataflow.py   |  19 +-
 airflow/providers/google/cloud/hooks/datafusion.py |  17 +-
 airflow/providers/google/cloud/hooks/dataproc.py   |   6 +-
 airflow/providers/google/cloud/hooks/datastore.py  |  46 +-
 airflow/providers/google/cloud/hooks/dlp.py        |   1 -
 airflow/providers/google/cloud/hooks/functions.py  |  14 +-
 airflow/providers/google/cloud/hooks/gcs.py        |  14 +-
 airflow/providers/google/cloud/hooks/gdm.py        |  12 +-
 .../google/cloud/hooks/kubernetes_engine.py        |   2 +-
 .../providers/google/cloud/hooks/life_sciences.py  |   9 +-
 airflow/providers/google/cloud/hooks/mlengine.py   |  27 +-
 airflow/providers/google/cloud/hooks/pubsub.py     |  19 +-
 .../providers/google/cloud/hooks/secret_manager.py |   2 +-
 airflow/providers/google/cloud/hooks/spanner.py    |   4 +-
 .../providers/google/cloud/hooks/text_to_speech.py |   6 +-
 airflow/providers/google/cloud/hooks/vision.py     |  17 +-
 airflow/providers/google/cloud/hooks/workflows.py  |   4 -
 .../providers/google/cloud/log/gcs_task_handler.py |  11 +-
 .../google/cloud/log/stackdriver_task_handler.py   |   6 +-
 airflow/providers/google/cloud/operators/automl.py |   6 +-
 .../providers/google/cloud/operators/bigquery.py   |   8 +-
 .../providers/google/cloud/operators/bigtable.py   |   2 +-
 .../operators/cloud_storage_transfer_service.py    |   4 +-
 .../google/cloud/operators/datacatalog.py          |   8 +-
 .../providers/google/cloud/operators/dataflow.py   |  36 +-
 .../providers/google/cloud/operators/datafusion.py |   2 +-
 .../providers/google/cloud/operators/dataproc.py   |  13 +-
 .../providers/google/cloud/operators/datastore.py  |   2 +-
 airflow/providers/google/cloud/operators/dlp.py    |   2 +-
 .../providers/google/cloud/operators/mlengine.py   |   6 +-
 airflow/providers/google/cloud/operators/pubsub.py |   5 +-
 .../google/cloud/operators/stackdriver.py          |   2 -
 airflow/providers/google/cloud/operators/tasks.py  |   2 +-
 .../providers/google/cloud/operators/workflows.py  |   4 -
 .../google/cloud/secrets/secret_manager.py         |   2 +-
 airflow/providers/google/cloud/sensors/dataproc.py |   2 +-
 airflow/providers/google/cloud/sensors/gcs.py      |   4 +-
 airflow/providers/google/cloud/sensors/pubsub.py   |   2 +-
 .../cloud/transfers/azure_fileshare_to_gcs.py      |   1 -
 .../google/cloud/transfers/bigquery_to_bigquery.py |   2 +-
 .../google/cloud/transfers/bigquery_to_gcs.py      |   2 +-
 .../google/cloud/transfers/bigquery_to_mysql.py    |   2 +-
 .../google/cloud/transfers/cassandra_to_gcs.py     |   8 +-
 .../google/cloud/transfers/gcs_to_bigquery.py      |   2 -
 .../providers/google/cloud/transfers/gcs_to_gcs.py |   2 +-
 .../google/cloud/transfers/gcs_to_local.py         |   4 +-
 .../google/cloud/transfers/gcs_to_sftp.py          |   1 -
 .../google/cloud/transfers/oracle_to_gcs.py        |   2 +-
 .../providers/google/cloud/transfers/s3_to_gcs.py  |   2 -
 .../providers/google/cloud/transfers/sql_to_gcs.py |   7 +-
 .../google/cloud/utils/credentials_provider.py     |  13 +-
 .../google/cloud/utils/field_sanitizer.py          |   2 +-
 .../google/cloud/utils/mlengine_operator_utils.py  |   4 +-
 .../cloud/utils/mlengine_prediction_summary.py     |   7 +-
 .../google/common/auth_backend/google_openid.py    |   2 +-
 .../providers/google/common/hooks/base_google.py   |  18 +-
 .../providers/google/firebase/hooks/firestore.py   |   8 +-
 .../google/marketing_platform/hooks/analytics.py   |  14 +-
 .../marketing_platform/hooks/campaign_manager.py   |  24 +-
 .../marketing_platform/hooks/display_video.py      |  44 +-
 .../google/marketing_platform/hooks/search_ads.py  |  16 +-
 .../operators/campaign_manager.py                  |   2 +-
 airflow/providers/google/suite/hooks/drive.py      |  12 +-
 airflow/providers/google/suite/hooks/sheets.py     |  18 +-
 airflow/providers/grpc/hooks/grpc.py               |   4 +-
 .../hashicorp/_internal_client/vault_client.py     |   6 +-
 airflow/providers/hashicorp/hooks/vault.py         |   4 +-
 airflow/providers/hashicorp/secrets/vault.py       |   5 +-
 .../jenkins/operators/jenkins_job_trigger.py       |   4 +-
 airflow/providers/jira/sensors/jira.py             |   4 +-
 .../providers/microsoft/azure/hooks/azure_batch.py |   2 +-
 .../microsoft/azure/hooks/azure_data_factory.py    |   2 +-
 airflow/providers/microsoft/azure/hooks/wasb.py    |   2 +-
 .../microsoft/azure/log/wasb_task_handler.py       |   2 +-
 .../microsoft/azure/operators/azure_batch.py       |   3 +-
 .../azure/operators/azure_container_instances.py   |   7 +-
 .../azure/transfers/oracle_to_azure_data_lake.py   |   1 -
 airflow/providers/microsoft/mssql/hooks/mssql.py   |  16 +-
 airflow/providers/microsoft/winrm/hooks/winrm.py   |   3 +-
 .../providers/microsoft/winrm/operators/winrm.py   |   3 +-
 airflow/providers/mysql/hooks/mysql.py             |  14 +-
 .../providers/mysql/transfers/vertica_to_mysql.py  |   2 +-
 airflow/providers/odbc/hooks/odbc.py               |   1 -
 .../providers/opsgenie/operators/opsgenie_alert.py |   1 -
 airflow/providers/oracle/hooks/oracle.py           |   5 +-
 .../providers/oracle/transfers/oracle_to_oracle.py |   1 -
 airflow/providers/pagerduty/hooks/pagerduty.py     |   1 -
 airflow/providers/postgres/hooks/postgres.py       |   1 -
 airflow/providers/presto/hooks/presto.py           |  10 +-
 airflow/providers/qubole/hooks/qubole.py           |   5 +-
 airflow/providers/qubole/hooks/qubole_check.py     |   2 +-
 airflow/providers/qubole/operators/qubole_check.py |   4 +-
 airflow/providers/qubole/sensors/qubole.py         |   6 +-
 airflow/providers/salesforce/hooks/tableau.py      |   1 -
 .../operators/tableau_refresh_workbook.py          |   1 -
 .../salesforce/sensors/tableau_job_status.py       |   1 -
 .../segment/operators/segment_track_event.py       |   1 -
 airflow/providers/sendgrid/utils/emailer.py        |   2 +-
 .../providers/singularity/operators/singularity.py |   2 +-
 airflow/providers/slack/hooks/slack.py             |   4 +-
 airflow/providers/slack/hooks/slack_webhook.py     |   1 -
 airflow/providers/slack/operators/slack.py         |   4 +-
 airflow/providers/slack/operators/slack_webhook.py |   1 -
 airflow/providers/snowflake/hooks/snowflake.py     |  12 +-
 .../snowflake/transfers/snowflake_to_slack.py      |   2 +-
 airflow/providers/ssh/hooks/ssh.py                 |   4 +-
 airflow/providers/trino/hooks/trino.py             |  10 +-
 airflow/providers/vertica/hooks/vertica.py         |   2 +-
 .../yandex/operators/yandexcloud_dataproc.py       |   7 -
 airflow/providers/zendesk/hooks/zendesk.py         |   1 -
 airflow/providers_manager.py                       |  22 +-
 airflow/secrets/base_secrets.py                    |   2 +-
 airflow/secrets/environment_variables.py           |   1 -
 airflow/secrets/local_filesystem.py                |   2 +-
 airflow/secrets/metastore.py                       |   2 -
 airflow/security/kerberos.py                       |   2 +-
 airflow/security/utils.py                          |  10 +-
 airflow/sensors/base.py                            |   2 +-
 airflow/sensors/base_sensor_operator.py            |   1 -
 airflow/sensors/bash.py                            |   2 +-
 airflow/sensors/date_time_sensor.py                |   1 -
 airflow/sensors/external_task.py                   |   4 +-
 airflow/sensors/external_task_sensor.py            |   1 -
 airflow/sensors/hdfs_sensor.py                     |   1 -
 airflow/sensors/hive_partition_sensor.py           |   1 -
 airflow/sensors/http_sensor.py                     |   1 -
 airflow/sensors/metastore_partition_sensor.py      |   1 -
 airflow/sensors/named_hive_partition_sensor.py     |   1 -
 airflow/sensors/s3_key_sensor.py                   |   1 -
 airflow/sensors/s3_prefix_sensor.py                |   1 -
 airflow/sensors/smart_sensor.py                    |  18 +-
 airflow/sensors/sql_sensor.py                      |   1 -
 airflow/sensors/time_delta_sensor.py               |   1 -
 airflow/sensors/web_hdfs_sensor.py                 |   1 -
 airflow/sentry.py                                  |   2 +-
 airflow/serialization/json_schema.py               |   3 +-
 airflow/serialization/serialized_objects.py        |  23 +-
 airflow/settings.py                                |  25 +-
 airflow/stats.py                                   |  12 +-
 airflow/task/task_runner/__init__.py               |   2 +-
 airflow/task/task_runner/base_task_runner.py       |   3 +-
 airflow/task/task_runner/standard_task_runner.py   |   8 +-
 airflow/ti_deps/deps/not_previously_skipped_dep.py |   2 +-
 airflow/ti_deps/deps/task_not_running_dep.py       |   2 +-
 airflow/ti_deps/deps/trigger_rule_dep.py           |   2 +-
 airflow/typing_compat.py                           |   9 +-
 airflow/utils/cli.py                               |  10 +-
 airflow/utils/cli_action_loggers.py                |   6 +-
 airflow/utils/dag_processing.py                    |  14 +-
 airflow/utils/dates.py                             |   3 +-
 airflow/utils/db.py                                |  19 +-
 airflow/utils/decorators.py                        |   2 +-
 airflow/utils/edgemodifier.py                      |   2 +-
 airflow/utils/event_scheduler.py                   |   2 +-
 airflow/utils/file.py                              |   8 +-
 airflow/utils/helpers.py                           |   6 +-
 airflow/utils/log/cloudwatch_task_handler.py       |   1 -
 airflow/utils/log/es_task_handler.py               |   1 -
 airflow/utils/log/file_processor_handler.py        |   2 +-
 airflow/utils/log/file_task_handler.py             |  10 +-
 airflow/utils/log/gcs_task_handler.py              |   1 -
 airflow/utils/log/json_formatter.py                |   1 -
 airflow/utils/log/log_reader.py                    |   6 +-
 airflow/utils/log/logging_mixin.py                 |   9 +-
 airflow/utils/log/s3_task_handler.py               |   1 -
 airflow/utils/log/secrets_masker.py                |  19 +-
 airflow/utils/log/stackdriver_task_handler.py      |   1 -
 .../log/task_handler_with_custom_formatter.py      |   2 +-
 airflow/utils/log/wasb_task_handler.py             |   1 -
 airflow/utils/orm_event_handlers.py                |   4 +-
 airflow/utils/platform.py                          |   2 +-
 airflow/utils/process_utils.py                     |   4 +-
 airflow/utils/session.py                           |   2 +-
 airflow/utils/sqlalchemy.py                        |   1 -
 airflow/utils/task_group.py                        |   2 +-
 airflow/utils/timeout.py                           |   4 +-
 airflow/utils/types.py                             |   2 +-
 airflow/utils/weekday.py                           |   1 -
 airflow/utils/yaml.py                              |   2 +-
 airflow/www/api/experimental/endpoints.py          |   2 +-
 airflow/www/app.py                                 |   4 +-
 airflow/www/auth.py                                |   2 +-
 airflow/www/decorators.py                          |   4 +-
 airflow/www/extensions/init_jinja_globals.py       |   2 +-
 airflow/www/extensions/init_manifest_files.py      |   4 +-
 airflow/www/forms.py                               |   2 +-
 airflow/www/gunicorn_config.py                     |   6 +-
 airflow/www/security.py                            |   8 +-
 airflow/www/utils.py                               |  57 +-
 airflow/www/views.py                               | 207 +++----
 breeze                                             |  12 +-
 breeze-complete                                    |   7 +-
 chart/tests/helm_template_generator.py             |   4 +-
 chart/tests/test_basic_helm_chart.py               |   2 +-
 dev/import_all_classes.py                          |   2 +-
 .../PROVIDER_README_TEMPLATE.rst.jinja2            |   2 +
 dev/provider_packages/SETUP_TEMPLATE.py.jinja2     |   8 +-
 dev/provider_packages/prepare_provider_packages.py |  26 +-
 dev/retag_docker_images.py                         |   4 +-
 dev/send_email.py                                  |   4 +-
 docs/build_docs.py                                 |  24 +-
 docs/conf.py                                       |   7 +-
 docs/exts/__init__.py                              |   1 -
 docs/exts/airflow_intersphinx.py                   |   4 +-
 docs/exts/docroles.py                              |   8 +-
 docs/exts/docs_build/__init__.py                   |   1 -
 docs/exts/docs_build/dev_index_generator.py        |   2 +-
 docs/exts/docs_build/docs_builder.py               |   8 +-
 docs/exts/docs_build/errors.py                     |   2 +-
 docs/exts/docs_build/fetch_inventories.py          |   8 +-
 docs/exts/docs_build/lint_checks.py                |   4 +-
 docs/exts/docs_build/spelling_checks.py            |   2 +-
 docs/exts/exampleinclude.py                        |  16 +-
 docs/exts/operators_and_hooks_ref.py               |   5 +-
 docs/exts/providers_packages_ref.py                |   2 +-
 docs/exts/removemarktransform.py                   |   5 +-
 docs/exts/substitution_extensions.py               |   2 +-
 docs/publish_docs.py                               |   3 -
 docs/spelling_wordlist.txt                         |   2 -
 kubernetes_tests/test_kubernetes_pod_operator.py   |  18 +-
 .../test_kubernetes_pod_operator_backcompat.py     |   8 +-
 pylintrc                                           | 597 --------------------
 pylintrc-tests                                     | 606 ---------------------
 scripts/ci/docker-compose/local.yml                |   2 -
 scripts/ci/libraries/_all_libs.sh                  |   2 -
 scripts/ci/libraries/_build_images.sh              |   5 +-
 scripts/ci/libraries/_initialization.sh            |   4 +-
 scripts/ci/libraries/_local_mounts.sh              |   2 -
 scripts/ci/libraries/_push_pull_remove_images.sh   |   4 +-
 scripts/ci/libraries/_pylint.sh                    |  37 --
 .../pre_commit_check_extras_have_providers.py      |   2 +-
 .../pre_commit_check_setup_extra_packages_ref.py   |   2 +-
 scripts/ci/pre_commit/pre_commit_insert_extras.py  |   2 +-
 scripts/ci/pre_commit/pre_commit_pylint.sh         |  26 -
 scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py    |   2 +-
 scripts/ci/pylint_todo.txt                         |   5 -
 scripts/ci/selective_ci_checks.sh                  |   4 +-
 scripts/ci/static_checks/pylint.sh                 |  56 --
 scripts/ci/static_checks/refresh_pylint_todo.sh    |  33 --
 scripts/ci/tools/ci_fix_ownership.sh               |   2 +-
 scripts/ci/tools/prepare_prod_docker_images.sh     |   2 +-
 scripts/in_container/_in_container_utils.sh        |  57 --
 scripts/in_container/refresh_pylint_todo.sh        |  21 -
 scripts/in_container/run_pylint.sh                 |  49 --
 .../in_container/update_quarantined_test_status.py |   4 +-
 scripts/tools/generate-integrations-json.py        |   3 -
 scripts/tools/list-integrations.py                 |   2 +-
 setup.cfg                                          |   3 +-
 setup.py                                           |  23 +-
 tests/api/auth/backend/test_basic_auth.py          |   2 +-
 tests/api/common/experimental/test_mark_tasks.py   |   2 +-
 tests/api_connexion/endpoints/test_dag_endpoint.py |   4 +-
 .../endpoints/test_dag_run_endpoint.py             |   2 +-
 .../endpoints/test_extra_link_endpoint.py          |   4 +-
 .../endpoints/test_import_error_endpoint.py        |   2 +-
 tests/api_connexion/endpoints/test_log_endpoint.py |   6 +-
 .../api_connexion/endpoints/test_task_endpoint.py  |   2 +-
 .../endpoints/test_task_instance_endpoint.py       |  10 +-
 .../api_connexion/endpoints/test_xcom_endpoint.py  |   2 +-
 tests/api_connexion/schemas/test_error_schema.py   |   2 +-
 tests/api_connexion/test_basic_auth.py             |   2 +-
 tests/bats/breeze/test_breeze_complete.bats        |   8 +-
 tests/build_provider_packages_dependencies.py      |   6 +-
 tests/cli/commands/test_celery_command.py          |   2 +-
 tests/cli/commands/test_info_command.py            |   2 +-
 tests/cli/commands/test_role_command.py            |   2 +-
 tests/cli/commands/test_user_command.py            |   2 +-
 tests/cli/commands/test_webserver_command.py       |  59 +-
 tests/conftest.py                                  |   6 +-
 tests/core/test_core.py                            |   8 +-
 tests/core/test_logging_config.py                  |   7 +-
 tests/core/test_settings.py                        |   8 +-
 tests/dags/subdir1/test_ignore_this.py             |   2 +-
 tests/dags/test_mark_success.py                    |   4 +-
 tests/dags/test_on_failure_callback.py             |   2 +-
 tests/dags/test_subdag.py                          |   2 +-
 tests/decorators/test_python.py                    |  57 +-
 tests/decorators/test_python_virtualenv.py         |  28 +-
 tests/executors/test_celery_executor.py            |  11 +-
 tests/executors/test_kubernetes_executor.py        |  15 +-
 tests/executors/test_local_executor.py             |  10 +-
 tests/hooks/test_subprocess.py                     |   2 +-
 tests/jobs/test_backfill_job.py                    |   1 -
 tests/jobs/test_local_task_job.py                  |   8 +-
 tests/jobs/test_scheduler_job.py                   |  76 +--
 tests/models/test_baseoperator.py                  |  14 +-
 tests/models/test_connection.py                    |   5 +-
 tests/models/test_dag.py                           |  13 +-
 tests/models/test_dagbag.py                        |  10 +-
 tests/models/test_dagparam.py                      |   3 -
 tests/models/test_pool.py                          |  16 +-
 tests/models/test_taskinstance.py                  |  10 +-
 tests/models/test_xcom.py                          |   4 +-
 tests/operators/test_email.py                      |   2 +-
 tests/operators/test_python.py                     |  28 +-
 tests/operators/test_sql.py                        |   1 -
 tests/operators/test_weekday.py                    |   2 +-
 tests/plugins/test_plugins_manager.py              |  32 +-
 tests/providers/amazon/aws/hooks/conftest.py       |   1 -
 tests/providers/amazon/aws/hooks/test_base_aws.py  |   8 +-
 .../amazon/aws/hooks/test_batch_client.py          |   1 -
 .../amazon/aws/hooks/test_batch_waiters.py         |   2 -
 tests/providers/amazon/aws/hooks/test_s3.py        |  18 +-
 .../amazon/aws/log/test_s3_task_handler.py         |  20 +-
 .../providers/amazon/aws/operators/test_athena.py  |   4 -
 tests/providers/amazon/aws/operators/test_batch.py |   1 -
 tests/providers/amazon/aws/operators/test_ecs.py   |  14 +-
 .../amazon/aws/operators/test_glacier_system.py    |   2 +-
 .../aws/operators/test_sagemaker_training.py       |   1 -
 tests/providers/amazon/aws/sensors/test_s3_key.py  |   2 +-
 .../amazon/aws/transfers/test_dynamodb_to_s3.py    |   2 +-
 tests/providers/apache/beam/hooks/test_beam.py     |  12 +-
 .../apache/cassandra/hooks/test_cassandra.py       |   2 +-
 tests/providers/apache/druid/hooks/test_druid.py   |   2 +-
 tests/providers/apache/hive/hooks/test_hive.py     |  33 +-
 .../apache/hive/transfers/test_hive_to_mysql.py    |   9 +
 .../apache/hive/transfers/test_hive_to_samba.py    |   9 +
 .../apache/hive/transfers/test_mssql_to_hive.py    |  22 +-
 .../apache/hive/transfers/test_mysql_to_hive.py    |   7 +
 tests/providers/apache/livy/hooks/test_livy.py     |  18 +-
 tests/providers/apache/pinot/hooks/test_pinot.py   |   2 +-
 tests/providers/asana/hooks/test_asana.py          |  38 +-
 .../kubernetes/operators/test_kubernetes_pod.py    |   1 -
 tests/providers/docker/hooks/test_docker.py        |   8 +-
 tests/providers/docker/operators/test_docker.py    |   2 +-
 .../elasticsearch/hooks/test_elasticsearch.py      |   2 +-
 .../elasticsearch/log/elasticmock/__init__.py      |   2 +-
 .../log/elasticmock/fake_elasticsearch.py          |   6 +-
 .../elasticsearch/log/test_es_task_handler.py      |   6 +-
 .../_internal_client/test_secret_manager_client.py |   3 +-
 tests/providers/google/cloud/hooks/test_automl.py  |   2 +-
 .../providers/google/cloud/hooks/test_bigquery.py  |   5 +-
 .../google/cloud/hooks/test_cloud_build.py         |  10 +-
 .../google/cloud/hooks/test_cloud_memorystore.py   |  26 +-
 .../providers/google/cloud/hooks/test_cloud_sql.py |  63 +--
 .../hooks/test_cloud_storage_transfer_service.py   |  19 +-
 tests/providers/google/cloud/hooks/test_compute.py |   1 -
 .../google/cloud/hooks/test_datacatalog.py         |  84 +--
 .../providers/google/cloud/hooks/test_dataflow.py  |  34 +-
 .../google/cloud/hooks/test_datafusion.py          |   2 -
 .../providers/google/cloud/hooks/test_dataprep.py  |  18 +-
 .../providers/google/cloud/hooks/test_dataproc.py  |   2 +-
 .../providers/google/cloud/hooks/test_datastore.py |  20 +-
 tests/providers/google/cloud/hooks/test_dlp.py     |  30 +-
 .../providers/google/cloud/hooks/test_functions.py |   8 +-
 tests/providers/google/cloud/hooks/test_gcs.py     |  18 +-
 tests/providers/google/cloud/hooks/test_gdm.py     |   2 +-
 tests/providers/google/cloud/hooks/test_kms.py     |   2 +-
 .../google/cloud/hooks/test_kubernetes_engine.py   |   4 +-
 .../google/cloud/hooks/test_life_sciences.py       |  11 +-
 tests/providers/google/cloud/hooks/test_pubsub.py  |   6 +-
 .../google/cloud/hooks/test_secret_manager.py      |   1 -
 .../google/cloud/hooks/test_stackdriver.py         |  10 +-
 .../providers/google/cloud/hooks/test_workflows.py |   2 +-
 .../google/cloud/operators/test_cloud_sql.py       |   1 -
 .../test_cloud_storage_transfer_service.py         |  30 +-
 .../google/cloud/operators/test_compute.py         |   7 +-
 .../google/cloud/operators/test_dataprep_system.py |   2 +-
 .../google/cloud/operators/test_dataproc.py        |   4 +-
 tests/providers/google/cloud/operators/test_dlp.py |   2 +-
 .../google/cloud/operators/test_dlp_system.py      |   2 +-
 .../cloud/operators/test_kubernetes_engine.py      |   7 -
 .../google/cloud/operators/test_spanner.py         |  14 +-
 .../google/cloud/operators/test_speech_to_text.py  |   6 +-
 .../test_azure_fileshare_to_gcs_system.py          |   2 +-
 .../google/cloud/transfers/test_gcs_to_sftp.py     |   1 -
 .../google/cloud/transfers/test_mssql_to_gcs.py    |   2 +-
 .../google/cloud/transfers/test_mysql_to_gcs.py    |   6 +-
 .../google/cloud/transfers/test_oracle_to_gcs.py   |   4 +-
 .../google/cloud/transfers/test_postgres_to_gcs.py |   2 +-
 .../google/cloud/transfers/test_presto_to_gcs.py   |   4 +-
 .../google/cloud/transfers/test_sftp_to_gcs.py     |   1 -
 .../google/cloud/transfers/test_trino_to_gcs.py    |   4 +-
 .../google/cloud/utils/gcp_authenticator.py        |   2 +-
 .../common/auth_backend/test_google_openid.py      |   2 +-
 .../google/common/hooks/test_base_google.py        |   4 +-
 tests/providers/grpc/hooks/test_grpc.py            |   6 +-
 .../_internal_client/test_vault_client.py          |   3 +-
 tests/providers/hashicorp/hooks/test_vault.py      |   1 -
 tests/providers/imap/hooks/test_imap.py            |   2 +-
 tests/providers/jdbc/hooks/test_jdbc.py            |   2 +-
 tests/providers/jira/hooks/test_jira.py            |   2 +-
 tests/providers/jira/sensors/test_jira.py          |   2 +-
 .../azure/hooks/test_azure_data_factory.py         |   1 -
 .../microsoft/azure/operators/test_azure_batch.py  |   2 +-
 tests/providers/mysql/operators/test_mysql.py      |   2 +-
 tests/providers/odbc/hooks/test_odbc.py            |   1 -
 tests/providers/oracle/hooks/test_oracle.py        |   1 -
 tests/providers/postgres/hooks/test_postgres.py    |   2 +-
 tests/providers/qubole/hooks/test_qubole.py        |   1 -
 .../qubole/operators/test_qubole_check.py          |   1 -
 tests/providers/sftp/operators/test_sftp.py        |   6 +-
 tests/providers/snowflake/hooks/test_snowflake.py  |   2 +-
 tests/providers/sqlite/hooks/test_sqlite.py        |   2 +-
 tests/providers/ssh/hooks/test_ssh.py              |   6 +-
 tests/providers/ssh/operators/test_ssh.py          |   8 +-
 tests/providers/tableau/hooks/test_tableau.py      |   2 +-
 tests/security/test_kerberos.py                    |   8 +-
 tests/sensors/test_base.py                         |   1 +
 tests/sensors/test_external_task_sensor.py         |   4 +-
 tests/sensors/test_smart_sensor_operator.py        |   2 +
 tests/serialization/test_dag_serialization.py      |   4 +-
 .../task/task_runner/test_standard_task_runner.py  |   2 +-
 tests/test_utils/asserts.py                        |   2 +-
 tests/test_utils/fake_datetime.py                  |   2 +-
 tests/test_utils/hdfs_utils.py                     |   2 +-
 tests/test_utils/perf/dags/elastic_dag.py          |   1 -
 tests/test_utils/perf/perf_kit/memory.py           |   2 +-
 tests/test_utils/perf/perf_kit/sqlalchemy.py       |  36 +-
 .../perf/scheduler_dag_execution_timing.py         |  10 +-
 tests/test_utils/remote_user_api_auth_backend.py   |   2 +-
 tests/test_utils/reset_warning_registry.py         |   6 +-
 tests/ti_deps/deps/fake_models.py                  |   8 +-
 .../deps/test_dag_ti_slots_available_dep.py        |   2 +-
 tests/ti_deps/deps/test_dag_unpaused_dep.py        |   2 +-
 tests/ti_deps/deps/test_dagrun_exists_dep.py       |   2 +-
 tests/ti_deps/deps/test_dagrun_id_dep.py           |   2 +-
 tests/ti_deps/deps/test_not_in_retry_period_dep.py |   2 +-
 .../ti_deps/deps/test_pool_slots_available_dep.py  |   5 +-
 tests/ti_deps/deps/test_prev_dagrun_dep.py         |   2 +-
 tests/ti_deps/deps/test_ready_to_reschedule_dep.py |   2 +-
 tests/ti_deps/deps/test_runnable_exec_date_dep.py  |   2 +-
 tests/ti_deps/deps/test_task_concurrency.py        |   2 +-
 tests/ti_deps/deps/test_task_not_running_dep.py    |   2 +-
 tests/ti_deps/deps/test_trigger_rule_dep.py        |   2 +-
 tests/ti_deps/deps/test_valid_state_dep.py         |   2 +-
 tests/utils/log/test_log_reader.py                 |   2 +-
 tests/utils/test_dag_processing.py                 |   2 +-
 tests/utils/test_edgemodifier.py                   |  10 +-
 tests/utils/test_module_loading.py                 |   2 +-
 tests/utils/test_retries.py                        |   4 +-
 tests/utils/test_task_group.py                     |   2 -
 tests/www/api/experimental/test_endpoints.py       |   2 +-
 tests/www/test_security.py                         |   6 +-
 tests/www/views/conftest.py                        |   4 +-
 tests/www/views/test_views_acl.py                  |  22 +-
 tests/www/views/test_views_extra_links.py          |   4 +-
 tests/www/views/test_views_log.py                  |   4 +-
 tests/www/views/test_views_rendered.py             |   2 +-
 tests/www/views/test_views_tasks.py                |   2 +-
 912 files changed, 2090 insertions(+), 4585 deletions(-)
 copy {tests/providers/zendesk/hooks => airflow/compat}/__init__.py (100%)
 copy tests/dags_with_system_exit/c_system_exit.py => airflow/compat/functools.py (72%)
 delete mode 100644 pylintrc
 delete mode 100644 pylintrc-tests
 delete mode 100644 scripts/ci/libraries/_pylint.sh
 delete mode 100755 scripts/ci/pre_commit/pre_commit_pylint.sh
 delete mode 100644 scripts/ci/pylint_todo.txt
 delete mode 100755 scripts/ci/static_checks/pylint.sh
 delete mode 100755 scripts/ci/static_checks/refresh_pylint_todo.sh
 delete mode 100755 scripts/in_container/refresh_pylint_todo.sh
 delete mode 100755 scripts/in_container/run_pylint.sh

[airflow] 02/03: Introduce compat shim airflow.compat.functools (#15969)

jhtimmins pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit e2453602fd9f6055202424a6491c4ef37a85adca
Author: Tzu-ping Chung <tp...@astronomer.io>
AuthorDate: Tue May 25 21:36:00 2021 +0800

    Introduce compat shim airflow.compat.functools (#15969)
    
    This module shims 'cached_property' and 'cache' so that modules no longer
    each need their own ad-hoc try-except ImportError.
    
    (cherry picked from commit 3db347edcfe444a67e59e8cf0019e80a02dbceab)
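
For illustration, a minimal sketch of the pattern this shim replaces; both
forms appear verbatim in the diff below, so nothing here is new API:

    # before: every module guarded its own import against older Pythons
    try:
        from functools import cached_property  # available on Python 3.8+
    except ImportError:
        from cached_property import cached_property  # backport package

    # after: one import from the shared compatibility module
    from airflow.compat.functools import cache, cached_property

The shim keeps the version check in a single place (airflow/compat/functools.py,
added below), so callers stay version-agnostic.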
---
 airflow/cli/commands/connection_command.py |  3 +--
 airflow/cli/commands/kubernetes_command.py |  3 +--
 airflow/cli/simple_table.py                |  2 +-
 airflow/compat/__init__.py                 | 16 +++++++++++++++
 airflow/compat/functools.py                | 33 ++++++++++++++++++++++++++++++
 airflow/configuration.py                   |  3 +--
 airflow/kubernetes/pod_generator.py        |  2 +-
 airflow/kubernetes/refresh_config.py       |  2 +-
 airflow/models/baseoperator.py             |  6 +-----
 airflow/operators/bash.py                  |  6 +-----
 airflow/operators/sql.py                   |  6 +-----
 airflow/providers_manager.py               |  2 +-
 airflow/secrets/local_filesystem.py        |  2 +-
 airflow/utils/log/log_reader.py            |  6 +-----
 airflow/utils/log/secrets_masker.py        | 15 +-------------
 airflow/utils/yaml.py                      |  2 +-
 airflow/www/views.py                       |  3 +--
 17 files changed, 64 insertions(+), 48 deletions(-)

diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py
index 19912b6..c79ba6a 100644
--- a/airflow/cli/commands/connection_command.py
+++ b/airflow/cli/commands/connection_command.py
@@ -24,13 +24,12 @@ from urllib.parse import urlparse, urlunparse
 
 from sqlalchemy.orm import exc
 
-import airflow.utils.yaml as yaml
 from airflow.cli.simple_table import AirflowConsole
 from airflow.exceptions import AirflowNotFoundException
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection
 from airflow.secrets.local_filesystem import load_connections_dict
-from airflow.utils import cli as cli_utils
+from airflow.utils import cli as cli_utils, yaml
 from airflow.utils.cli import suppress_logs_and_warning
 from airflow.utils.session import create_session
 
diff --git a/airflow/cli/commands/kubernetes_command.py b/airflow/cli/commands/kubernetes_command.py
index daf11a3..3aad616 100644
--- a/airflow/cli/commands/kubernetes_command.py
+++ b/airflow/cli/commands/kubernetes_command.py
@@ -22,14 +22,13 @@ from kubernetes import client
 from kubernetes.client.api_client import ApiClient
 from kubernetes.client.rest import ApiException
 
-import airflow.utils.yaml as yaml
 from airflow.executors.kubernetes_executor import KubeConfig, create_pod_id
 from airflow.kubernetes import pod_generator
 from airflow.kubernetes.kube_client import get_kube_client
 from airflow.kubernetes.pod_generator import PodGenerator
 from airflow.models import TaskInstance
 from airflow.settings import pod_mutation_hook
-from airflow.utils import cli as cli_utils
+from airflow.utils import cli as cli_utils, yaml
 from airflow.utils.cli import get_dag
 
 
diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py
index 65e846e..d17f948 100644
--- a/airflow/cli/simple_table.py
+++ b/airflow/cli/simple_table.py
@@ -24,8 +24,8 @@ from rich.syntax import Syntax
 from rich.table import Table
 from tabulate import tabulate
 
-import airflow.utils.yaml as yaml
 from airflow.plugins_manager import PluginsDirectorySource
+from airflow.utils import yaml
 from airflow.utils.platform import is_tty
 
 
diff --git a/airflow/compat/__init__.py b/airflow/compat/__init__.py
new file mode 100644
index 0000000..13a8339
--- /dev/null
+++ b/airflow/compat/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py
new file mode 100644
index 0000000..10b4085
--- /dev/null
+++ b/airflow/compat/functools.py
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import sys
+
+if sys.version_info >= (3, 8):
+    from functools import cached_property  # pylint: disable=no-name-in-module
+else:
+    from cached_property import cached_property
+
+if sys.version_info >= (3, 9):
+    from functools import cache  # pylint: disable=no-name-in-module
+else:
+    from functools import lru_cache
+
+    cache = lru_cache(maxsize=None)
+
+
+__all__ = ["cache", "cached_property"]
diff --git a/airflow/configuration.py b/airflow/configuration.py
index 263ba4b..ed38efd 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -36,6 +36,7 @@ from typing import Dict, List, Optional, Union
 
 from airflow.exceptions import AirflowConfigException
 from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend
+from airflow.utils import yaml
 from airflow.utils.module_loading import import_string
 
 log = logging.getLogger(__name__)
@@ -97,8 +98,6 @@ def default_config_yaml() -> List[dict]:
 
     :return: Python dictionary containing configs & their info
     """
-    import airflow.utils.yaml as yaml
-
     with open(_default_config_file_path('config.yml')) as config_file:
         return yaml.safe_load(config_file)
 
diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py
index 80602e3..4b4028f 100644
--- a/airflow/kubernetes/pod_generator.py
+++ b/airflow/kubernetes/pod_generator.py
@@ -34,9 +34,9 @@ from dateutil import parser
 from kubernetes.client import models as k8s
 from kubernetes.client.api_client import ApiClient
 
-import airflow.utils.yaml as yaml
 from airflow.exceptions import AirflowConfigException
 from airflow.kubernetes.pod_generator_deprecated import PodDefaults, PodGenerator as PodGeneratorDeprecated
+from airflow.utils import yaml
 from airflow.version import version as airflow_version
 
 MAX_LABEL_LEN = 63
diff --git a/airflow/kubernetes/refresh_config.py b/airflow/kubernetes/refresh_config.py
index 2738b86..a039e7d 100644
--- a/airflow/kubernetes/refresh_config.py
+++ b/airflow/kubernetes/refresh_config.py
@@ -31,7 +31,7 @@ from kubernetes.client import Configuration
 from kubernetes.config.exec_provider import ExecProvider
 from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION, KubeConfigLoader
 
-import airflow.utils.yaml as yaml
+from airflow.utils import yaml
 
 
 def _parse_timestamp(ts_str: str) -> int:
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index f74c5f9..7af23d3 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -46,15 +46,11 @@ from typing import (
 
 import attr
 import jinja2
-
-try:
-    from functools import cached_property
-except ImportError:
-    from cached_property import cached_property
 from dateutil.relativedelta import relativedelta
 from sqlalchemy.orm import Session
 
 import airflow.templates
+from airflow.compat.functools import cached_property
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.lineage import apply_lineage, prepare_lineage
diff --git a/airflow/operators/bash.py b/airflow/operators/bash.py
index 1cc85e1..66a70a9 100644
--- a/airflow/operators/bash.py
+++ b/airflow/operators/bash.py
@@ -18,11 +18,7 @@
 import os
 from typing import Dict, Optional
 
-try:
-    from functools import cached_property
-except ImportError:
-    from cached_property import cached_property
-
+from airflow.compat.functools import cached_property
 from airflow.exceptions import AirflowException, AirflowSkipException
 from airflow.hooks.subprocess import SubprocessHook
 from airflow.models import BaseOperator
diff --git a/airflow/operators/sql.py b/airflow/operators/sql.py
index 769cf26..8347bcb 100644
--- a/airflow/operators/sql.py
+++ b/airflow/operators/sql.py
@@ -18,11 +18,7 @@
 from distutils.util import strtobool
 from typing import Any, Dict, Iterable, List, Mapping, Optional, SupportsAbs, Union
 
-try:
-    from functools import cached_property
-except ImportError:
-    from cached_property import cached_property
-
+from airflow.compat.functools import cached_property
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.hooks.dbapi import DbApiHook
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index 420135b..9f1469c 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -27,7 +27,7 @@ from typing import Any, Dict, NamedTuple, Set
 import jsonschema
 from wtforms import Field
 
-import airflow.utils.yaml as yaml
+from airflow.utils import yaml
 from airflow.utils.entry_points import entry_points_with_dist
 
 try:
diff --git a/airflow/secrets/local_filesystem.py b/airflow/secrets/local_filesystem.py
index 3ec20e1..d23969f 100644
--- a/airflow/secrets/local_filesystem.py
+++ b/airflow/secrets/local_filesystem.py
@@ -25,7 +25,6 @@ from inspect import signature
 from json import JSONDecodeError
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple
 
-import airflow.utils.yaml as yaml
 from airflow.exceptions import (
     AirflowException,
     AirflowFileParseException,
@@ -33,6 +32,7 @@ from airflow.exceptions import (
     FileSyntaxError,
 )
 from airflow.secrets.base_secrets import BaseSecretsBackend
+from airflow.utils import yaml
 from airflow.utils.file import COMMENT_PATTERN
 from airflow.utils.log.logging_mixin import LoggingMixin
 
diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py
index f4096eb..0e1f691 100644
--- a/airflow/utils/log/log_reader.py
+++ b/airflow/utils/log/log_reader.py
@@ -18,11 +18,7 @@
 import logging
 from typing import Dict, Iterator, List, Optional, Tuple
 
-try:
-    from functools import cached_property
-except ImportError:
-    from cached_property import cached_property
-
+from airflow.compat.functools import cached_property
 from airflow.configuration import conf
 from airflow.models import TaskInstance
 from airflow.utils.helpers import render_log_filename
diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py
index 1796cbc..8bb7557 100644
--- a/airflow/utils/log/secrets_masker.py
+++ b/airflow/utils/log/secrets_masker.py
@@ -20,20 +20,7 @@ import logging
 import re
 from typing import TYPE_CHECKING, Iterable, Optional, Set, TypeVar, Union
 
-try:
-    # 3.8+
-    from functools import cached_property
-except ImportError:
-    from cached_property import cached_property
-
-try:
-    # 3.9+
-    from functools import cache
-except ImportError:
-    from functools import lru_cache
-
-    cache = lru_cache(maxsize=None)
-
+from airflow.compat.functools import cache, cached_property
 
 if TYPE_CHECKING:
     from airflow.typing_compat import RePatternType
diff --git a/airflow/utils/yaml.py b/airflow/utils/yaml.py
index e3be61c..c452631 100644
--- a/airflow/utils/yaml.py
+++ b/airflow/utils/yaml.py
@@ -30,7 +30,7 @@ import sys
 from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, Union, cast
 
 if TYPE_CHECKING:
-    from yaml.error import MarkedYAMLError  # noqa
+    from yaml.error import MarkedYAMLError, YAMLError  # noqa
 
 
 def safe_load(stream: Union[bytes, str, BinaryIO, TextIO]) -> Any:
diff --git a/airflow/www/views.py b/airflow/www/views.py
index fdfea42..9bc677e 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -82,7 +82,6 @@ from wtforms import SelectField, validators
 from wtforms.validators import InputRequired
 
 import airflow
-import airflow.utils.yaml as yaml
 from airflow import models, plugins_manager, settings
 from airflow.api.common.experimental.mark_tasks import (
     set_dag_run_state_to_failed,
@@ -103,7 +102,7 @@ from airflow.providers_manager import ProvidersManager
 from airflow.security import permissions
 from airflow.ti_deps.dep_context import DepContext
 from airflow.ti_deps.dependencies_deps import RUNNING_DEPS, SCHEDULER_QUEUED_DEPS
-from airflow.utils import json as utils_json, timezone
+from airflow.utils import json as utils_json, timezone, yaml
 from airflow.utils.dates import infer_time_unit, scale_time_units
 from airflow.utils.docs import get_docs_url
 from airflow.utils.helpers import alchemy_to_dict

[airflow] 03/03: Removes pylint from our toolchain (#16682)

jhtimmins pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 6df6369261a52192c36b43ef72db9ce1a39f6477
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Jun 28 19:26:16 2021 +0200

    Removes pylint from our toolchain (#16682)
    
    We've agreed during the voting process that Pylint support
    should be disabled: https://lists.apache.org/thread.html/r9e2cc385db8737ec0874ad09872081bd083593ee29e8303e58d21efb%40%3Cdev.airflow.apache.org%3E
    
    This PR:
    
    * removes all # pylint comments
    * removes pylint pre-commits and related scripts/files
    * removes CI jobs running pylint checks
    * removes documentation about pylint
    * removes unnecessary #noqa comments (adds a pre-commit check for that)
    * fixes some remaining pydocstyle errors after removing the #noqa comments
    
    (cherry picked from commit 866a601b76e219b3c043e1dbbc8fb22300866351)
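
For illustration, a minimal sketch of the mechanical change applied across the
tree; the "before" line appears verbatim in airflow/compat/functools.py in the
previous commit, which this commit touches again per the file list below:

    # before: statements carried inline pylint suppressions
    from functools import cached_property  # pylint: disable=no-name-in-module

    # after: the suppression comment is dropped; the code itself is unchanged
    from functools import cached_property

Alongside these comment removals, the pylint configuration files (pylintrc,
pylintrc-tests) and the CI/pre-commit scripts that invoked pylint are deleted
outright, as the summary below shows.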
---
 .dockerignore                                      |   2 -
 .github/boring-cyborg.yml                          |   2 +-
 .github/workflows/ci.yml                           |  73 +--
 .pre-commit-config.yaml                            |  63 +--
 .rat-excludes                                      |   3 -
 BREEZE.rst                                         |  30 +-
 CI.rst                                             |   4 +-
 CONTRIBUTING.rst                                   |   2 +-
 CONTRIBUTORS_QUICK_START.rst                       |  14 +-
 PULL_REQUEST_WORKFLOW.rst                          |   6 +-
 STATIC_CODE_CHECKS.rst                             | 116 +---
 airflow/__init__.py                                |  10 +-
 airflow/api/auth/backend/basic_auth.py             |   2 +-
 airflow/api/auth/backend/default.py                |   2 +-
 airflow/api/auth/backend/deny_all.py               |   4 +-
 airflow/api/auth/backend/kerberos_auth.py          |   6 +-
 airflow/api/client/json_client.py                  |   4 +-
 airflow/api/common/experimental/delete_dag.py      |   2 +-
 airflow/api/common/experimental/mark_tasks.py      |   9 +-
 airflow/api/common/experimental/trigger_dag.py     |   2 +-
 .../api_connexion/endpoints/dag_run_endpoint.py    |   4 +-
 airflow/api_connexion/endpoints/health_endpoint.py |   2 +-
 .../endpoints/task_instance_endpoint.py            |   2 +-
 airflow/api_connexion/parameters.py                |   2 +-
 airflow/api_connexion/schemas/common_schema.py     |   4 +-
 airflow/api_connexion/schemas/connection_schema.py |   2 +-
 airflow/api_connexion/schemas/error_schema.py      |   2 +-
 airflow/api_connexion/security.py                  |   2 +-
 airflow/cli/cli_parser.py                          |  13 +-
 airflow/cli/commands/celery_command.py             |   2 +-
 airflow/cli/commands/info_command.py               |   2 +-
 airflow/cli/commands/kubernetes_command.py         |   4 +-
 airflow/cli/commands/plugins_command.py            |   2 +-
 airflow/cli/commands/pool_command.py               |   2 +-
 airflow/cli/commands/role_command.py               |   4 +-
 airflow/cli/commands/sync_perm_command.py          |   4 +-
 airflow/cli/commands/task_command.py               |   4 +-
 airflow/cli/commands/user_command.py               |  19 +-
 airflow/cli/commands/variable_command.py           |   4 +-
 airflow/cli/commands/webserver_command.py          |   6 +-
 airflow/cli/simple_table.py                        |   3 +-
 airflow/compat/functools.py                        |   4 +-
 airflow/configuration.py                           |  28 +-
 airflow/contrib/hooks/aws_athena_hook.py           |   1 -
 airflow/contrib/hooks/aws_datasync_hook.py         |   1 -
 airflow/contrib/hooks/aws_dynamodb_hook.py         |   1 -
 airflow/contrib/hooks/aws_firehose_hook.py         |   1 -
 airflow/contrib/hooks/aws_glue_catalog_hook.py     |   1 -
 airflow/contrib/hooks/aws_hook.py                  |   1 -
 airflow/contrib/hooks/aws_lambda_hook.py           |   1 -
 airflow/contrib/hooks/aws_logs_hook.py             |   1 -
 airflow/contrib/hooks/aws_sns_hook.py              |   1 -
 airflow/contrib/hooks/aws_sqs_hook.py              |   1 -
 .../contrib/hooks/azure_container_instance_hook.py |   1 -
 .../contrib/hooks/azure_container_registry_hook.py |   1 -
 .../contrib/hooks/azure_container_volume_hook.py   |   1 -
 airflow/contrib/hooks/azure_cosmos_hook.py         |   1 -
 airflow/contrib/hooks/azure_data_lake_hook.py      |   1 -
 airflow/contrib/hooks/azure_fileshare_hook.py      |   1 -
 airflow/contrib/hooks/bigquery_hook.py             |   1 -
 airflow/contrib/hooks/cassandra_hook.py            |   1 -
 airflow/contrib/hooks/cloudant_hook.py             |   1 -
 airflow/contrib/hooks/databricks_hook.py           |   1 -
 airflow/contrib/hooks/datadog_hook.py              |   1 -
 airflow/contrib/hooks/datastore_hook.py            |   1 -
 airflow/contrib/hooks/dingding_hook.py             |   1 -
 airflow/contrib/hooks/discord_webhook_hook.py      |   1 -
 airflow/contrib/hooks/emr_hook.py                  |   1 -
 airflow/contrib/hooks/fs_hook.py                   |   1 -
 airflow/contrib/hooks/ftp_hook.py                  |   1 -
 airflow/contrib/hooks/gcp_bigtable_hook.py         |   1 -
 airflow/contrib/hooks/gcp_cloud_build_hook.py      |   1 -
 airflow/contrib/hooks/gcp_dlp_hook.py              |   1 -
 airflow/contrib/hooks/gcp_mlengine_hook.py         |   1 -
 airflow/contrib/hooks/gcp_natural_language_hook.py |   1 -
 airflow/contrib/hooks/gcp_pubsub_hook.py           |   1 -
 airflow/contrib/hooks/gcp_tasks_hook.py            |   1 -
 airflow/contrib/hooks/gcp_translate_hook.py        |   1 -
 .../contrib/hooks/gcp_video_intelligence_hook.py   |   1 -
 airflow/contrib/hooks/gcp_vision_hook.py           |   1 -
 airflow/contrib/hooks/gdrive_hook.py               |   1 -
 airflow/contrib/hooks/grpc_hook.py                 |   1 -
 airflow/contrib/hooks/imap_hook.py                 |   1 -
 airflow/contrib/hooks/jenkins_hook.py              |   1 -
 airflow/contrib/hooks/jira_hook.py                 |   1 -
 airflow/contrib/hooks/mongo_hook.py                |   1 -
 airflow/contrib/hooks/openfaas_hook.py             |   1 -
 airflow/contrib/hooks/opsgenie_alert_hook.py       |   1 -
 airflow/contrib/hooks/pagerduty_hook.py            |   1 -
 airflow/contrib/hooks/pinot_hook.py                |   1 -
 airflow/contrib/hooks/qubole_check_hook.py         |   1 -
 airflow/contrib/hooks/qubole_hook.py               |   1 -
 airflow/contrib/hooks/redis_hook.py                |   1 -
 airflow/contrib/hooks/redshift_hook.py             |   1 -
 airflow/contrib/hooks/sagemaker_hook.py            |   1 -
 airflow/contrib/hooks/salesforce_hook.py           |   1 -
 airflow/contrib/hooks/segment_hook.py              |   1 -
 airflow/contrib/hooks/sftp_hook.py                 |   1 -
 airflow/contrib/hooks/slack_webhook_hook.py        |   1 -
 airflow/contrib/hooks/snowflake_hook.py            |   1 -
 airflow/contrib/hooks/spark_jdbc_hook.py           |   1 -
 airflow/contrib/hooks/spark_sql_hook.py            |   1 -
 airflow/contrib/hooks/spark_submit_hook.py         |   1 -
 airflow/contrib/hooks/sqoop_hook.py                |   1 -
 airflow/contrib/hooks/ssh_hook.py                  |   1 -
 airflow/contrib/hooks/vertica_hook.py              |   1 -
 airflow/contrib/hooks/wasb_hook.py                 |   1 -
 airflow/contrib/hooks/winrm_hook.py                |   1 -
 airflow/contrib/operators/adls_list_operator.py    |   1 -
 airflow/contrib/operators/aws_athena_operator.py   |   1 -
 .../contrib/operators/aws_sqs_publish_operator.py  |   1 -
 airflow/contrib/operators/awsbatch_operator.py     |   1 -
 .../azure_container_instances_operator.py          |   1 -
 airflow/contrib/operators/azure_cosmos_operator.py |   1 -
 .../contrib/operators/bigquery_check_operator.py   |   1 -
 airflow/contrib/operators/bigquery_get_data.py     |   1 -
 airflow/contrib/operators/bigquery_operator.py     |   1 -
 airflow/contrib/operators/bigquery_to_bigquery.py  |   1 -
 .../operators/bigquery_to_mysql_operator.py        |   1 -
 airflow/contrib/operators/databricks_operator.py   |   1 -
 airflow/contrib/operators/dingding_operator.py     |   1 -
 .../contrib/operators/discord_webhook_operator.py  |   1 -
 airflow/contrib/operators/docker_swarm_operator.py |   1 -
 airflow/contrib/operators/druid_operator.py        |   1 -
 airflow/contrib/operators/dynamodb_to_s3.py        |   1 -
 airflow/contrib/operators/ecs_operator.py          |   1 -
 .../contrib/operators/emr_add_steps_operator.py    |   1 -
 .../operators/emr_create_job_flow_operator.py      |   1 -
 .../operators/emr_terminate_job_flow_operator.py   |   1 -
 airflow/contrib/operators/file_to_wasb.py          |   1 -
 airflow/contrib/operators/gcp_bigtable_operator.py |   1 -
 .../contrib/operators/gcp_cloud_build_operator.py  |   1 -
 airflow/contrib/operators/gcp_dlp_operator.py      |   1 -
 airflow/contrib/operators/gcp_tasks_operator.py    |   1 -
 .../contrib/operators/gcp_translate_operator.py    |   1 -
 .../operators/gcp_translate_speech_operator.py     |   3 +-
 .../operators/gcp_video_intelligence_operator.py   |   1 -
 airflow/contrib/operators/gcp_vision_operator.py   |   2 +-
 .../operators/gcs_to_gcs_transfer_operator.py      |   2 -
 .../contrib/operators/gcs_to_gdrive_operator.py    |   1 -
 airflow/contrib/operators/grpc_operator.py         |   1 -
 airflow/contrib/operators/hive_to_dynamodb.py      |   1 -
 .../operators/imap_attachment_to_s3_operator.py    |   1 -
 .../operators/jenkins_job_trigger_operator.py      |   1 -
 airflow/contrib/operators/jira_operator.py         |   1 -
 .../contrib/operators/kubernetes_pod_operator.py   |   1 -
 airflow/contrib/operators/mongo_to_s3.py           |   1 -
 .../contrib/operators/opsgenie_alert_operator.py   |   1 -
 .../oracle_to_azure_data_lake_transfer.py          |   1 -
 .../contrib/operators/oracle_to_oracle_transfer.py |   1 -
 airflow/contrib/operators/qubole_check_operator.py |   1 -
 airflow/contrib/operators/qubole_operator.py       |   1 -
 .../contrib/operators/redis_publish_operator.py    |   1 -
 .../contrib/operators/s3_copy_object_operator.py   |   1 -
 .../operators/s3_delete_objects_operator.py        |   1 -
 airflow/contrib/operators/s3_list_operator.py      |   1 -
 airflow/contrib/operators/s3_to_gcs_operator.py    |   1 -
 .../operators/s3_to_gcs_transfer_operator.py       |   1 -
 airflow/contrib/operators/s3_to_sftp_operator.py   |   1 -
 .../contrib/operators/sagemaker_base_operator.py   |   1 -
 .../sagemaker_endpoint_config_operator.py          |   1 -
 .../operators/sagemaker_endpoint_operator.py       |   1 -
 .../contrib/operators/sagemaker_model_operator.py  |   1 -
 .../operators/sagemaker_training_operator.py       |   1 -
 .../operators/sagemaker_transform_operator.py      |   1 -
 .../contrib/operators/sagemaker_tuning_operator.py |   1 -
 .../operators/segment_track_event_operator.py      |   1 -
 airflow/contrib/operators/sftp_operator.py         |   1 -
 airflow/contrib/operators/sftp_to_s3_operator.py   |   1 -
 .../contrib/operators/slack_webhook_operator.py    |   1 -
 airflow/contrib/operators/snowflake_operator.py    |   1 -
 airflow/contrib/operators/sns_publish_operator.py  |   1 -
 airflow/contrib/operators/spark_jdbc_operator.py   |   1 -
 airflow/contrib/operators/spark_sql_operator.py    |   1 -
 airflow/contrib/operators/spark_submit_operator.py |   1 -
 airflow/contrib/operators/sqoop_operator.py        |   1 -
 airflow/contrib/operators/ssh_operator.py          |   1 -
 airflow/contrib/operators/vertica_operator.py      |   1 -
 airflow/contrib/operators/vertica_to_mysql.py      |   1 -
 .../contrib/operators/wasb_delete_blob_operator.py |   1 -
 airflow/contrib/operators/winrm_operator.py        |   1 -
 airflow/contrib/secrets/aws_secrets_manager.py     |   1 -
 airflow/contrib/secrets/aws_systems_manager.py     |   1 -
 airflow/contrib/secrets/azure_key_vault.py         |   1 -
 airflow/contrib/secrets/gcp_secrets_manager.py     |   1 -
 airflow/contrib/secrets/hashicorp_vault.py         |   1 -
 airflow/contrib/sensors/aws_athena_sensor.py       |   1 -
 .../sensors/aws_glue_catalog_partition_sensor.py   |   1 -
 .../contrib/sensors/aws_redshift_cluster_sensor.py |   1 -
 airflow/contrib/sensors/aws_sqs_sensor.py          |   1 -
 airflow/contrib/sensors/azure_cosmos_sensor.py     |   1 -
 airflow/contrib/sensors/bash_sensor.py             |   1 -
 airflow/contrib/sensors/cassandra_record_sensor.py |   1 -
 airflow/contrib/sensors/cassandra_table_sensor.py  |   1 -
 airflow/contrib/sensors/celery_queue_sensor.py     |   1 -
 airflow/contrib/sensors/datadog_sensor.py          |   1 -
 airflow/contrib/sensors/emr_base_sensor.py         |   1 -
 airflow/contrib/sensors/emr_job_flow_sensor.py     |   1 -
 airflow/contrib/sensors/emr_step_sensor.py         |   1 -
 airflow/contrib/sensors/file_sensor.py             |   1 -
 airflow/contrib/sensors/ftp_sensor.py              |   1 -
 airflow/contrib/sensors/hdfs_sensor.py             |   3 -
 airflow/contrib/sensors/imap_attachment_sensor.py  |   1 -
 airflow/contrib/sensors/jira_sensor.py             |   1 -
 airflow/contrib/sensors/mongo_sensor.py            |   1 -
 airflow/contrib/sensors/pubsub_sensor.py           |   1 -
 airflow/contrib/sensors/python_sensor.py           |   1 -
 airflow/contrib/sensors/qubole_sensor.py           |   1 -
 airflow/contrib/sensors/redis_key_sensor.py        |   1 -
 airflow/contrib/sensors/redis_pub_sub_sensor.py    |   1 -
 airflow/contrib/sensors/sagemaker_base_sensor.py   |   1 -
 .../contrib/sensors/sagemaker_endpoint_sensor.py   |   1 -
 .../contrib/sensors/sagemaker_training_sensor.py   |   1 -
 .../contrib/sensors/sagemaker_transform_sensor.py  |   1 -
 airflow/contrib/sensors/sagemaker_tuning_sensor.py |   1 -
 airflow/contrib/sensors/sftp_sensor.py             |   1 -
 airflow/contrib/sensors/wasb_sensor.py             |   1 -
 airflow/contrib/sensors/weekday_sensor.py          |   1 -
 airflow/contrib/task_runner/cgroup_task_runner.py  |   1 -
 airflow/contrib/utils/gcp_field_sanitizer.py       |   1 -
 airflow/contrib/utils/gcp_field_validator.py       |   1 -
 .../log/task_handler_with_custom_formatter.py      |   1 -
 airflow/contrib/utils/mlengine_operator_utils.py   |   1 -
 .../contrib/utils/mlengine_prediction_summary.py   |   1 -
 airflow/contrib/utils/weekday.py                   |   1 -
 airflow/decorators/__init__.py                     |   4 +-
 airflow/decorators/base.py                         |   2 +-
 airflow/decorators/python.py                       |   2 +-
 airflow/decorators/python_virtualenv.py            |   2 +-
 airflow/decorators/task_group.py                   |   2 +-
 airflow/example_dags/example_branch_labels.py      |   4 +-
 airflow/example_dags/example_branch_operator.py    |   2 +-
 airflow/example_dags/libs/helper.py                |   3 +-
 airflow/example_dags/tutorial_etl_dag.py           |   1 -
 airflow/example_dags/tutorial_taskflow_api_etl.py  |   1 -
 .../tutorial_taskflow_api_etl_virtualenv.py        |   1 -
 airflow/executors/celery_executor.py               |  26 +-
 airflow/executors/debug_executor.py                |  12 +-
 airflow/executors/kubernetes_executor.py           |  18 +-
 airflow/executors/local_executor.py                |  21 +-
 airflow/hooks/S3_hook.py                           |   3 +-
 airflow/hooks/base_hook.py                         |   1 -
 airflow/hooks/dbapi.py                             |   2 +-
 airflow/hooks/dbapi_hook.py                        |   1 -
 airflow/hooks/docker_hook.py                       |   1 -
 airflow/hooks/druid_hook.py                        |   1 -
 airflow/hooks/hdfs_hook.py                         |   1 -
 airflow/hooks/hive_hooks.py                        |   1 -
 airflow/hooks/http_hook.py                         |   1 -
 airflow/hooks/jdbc_hook.py                         |   1 -
 airflow/hooks/mssql_hook.py                        |   1 -
 airflow/hooks/mysql_hook.py                        |   1 -
 airflow/hooks/oracle_hook.py                       |   1 -
 airflow/hooks/pig_hook.py                          |   1 -
 airflow/hooks/postgres_hook.py                     |   1 -
 airflow/hooks/presto_hook.py                       |   1 -
 airflow/hooks/samba_hook.py                        |   1 -
 airflow/hooks/slack_hook.py                        |   1 -
 airflow/hooks/sqlite_hook.py                       |   1 -
 airflow/hooks/subprocess.py                        |   3 +-
 airflow/hooks/webhdfs_hook.py                      |   1 -
 airflow/hooks/zendesk_hook.py                      |   1 -
 airflow/jobs/__init__.py                           |   6 +-
 airflow/jobs/backfill_job.py                       |  13 +-
 airflow/jobs/base_job.py                           |   2 +-
 airflow/jobs/local_task_job.py                     |  10 +-
 airflow/jobs/scheduler_job.py                      |  25 +-
 airflow/kubernetes/kube_client.py                  |   7 +-
 airflow/kubernetes/kube_config.py                  |   4 +-
 airflow/kubernetes/pod.py                          |   7 +-
 airflow/kubernetes/pod_generator.py                |   9 +-
 airflow/kubernetes/pod_generator_deprecated.py     |   2 +-
 airflow/kubernetes/pod_launcher.py                 |   4 +-
 airflow/kubernetes/pod_runtime_info_env.py         |   2 +-
 airflow/kubernetes/refresh_config.py               |   6 +-
 airflow/kubernetes/volume.py                       |   2 +-
 airflow/kubernetes/volume_mount.py                 |   2 +-
 airflow/lineage/__init__.py                        |   4 +-
 airflow/lineage/backend.py                         |   2 +-
 airflow/lineage/entities.py                        |   5 +-
 airflow/logging_config.py                          |   2 +-
 airflow/migrations/env.py                          |   3 +-
 .../versions/03bc53e68815_add_sm_dag_index.py      |   4 +-
 .../versions/05f30312d566_merge_heads.py           |   4 +-
 .../0a2a5b66e19d_add_task_reschedule_table.py      |  10 +-
 .../0e2a74e0fc9f_add_time_zone_awareness.py        |   4 +-
 ...dfa7_add_dag_id_state_index_on_dag_run_table.py |   4 +-
 .../versions/13eb55f81627_for_compatibility.py     |   4 +-
 .../versions/1507a7289a2f_create_is_encrypted.py   |   4 +-
 ...cfc09e3_add_is_encrypted_column_to_variable_.py |   4 +-
 .../migrations/versions/1b38cef5b76e_add_dagrun.py |   4 +-
 .../versions/211e584da130_add_ti_state_index.py    |   4 +-
 ...30d7c24_add_executor_config_to_task_instance.py |   4 +-
 .../versions/2e541a1dcfed_task_duration.py         |   4 +-
 .../versions/2e82aab8ef20_rename_user_table.py     |   4 +-
 ...338e90f54d61_more_logging_into_task_isntance.py |   4 +-
 ...7a1ff4_add_kubernetes_resource_checkpointing.py |   4 +-
 .../versions/40e67319e3a9_dagrun_config.py         |   4 +-
 .../versions/41f5f12752f8_add_superuser_field.py   |   4 +-
 .../versions/4446e08588_dagrun_start_end.py        |   4 +-
 ...236f1_add_fractional_seconds_to_mysql_tables.py |   8 +-
 .../versions/502898887f84_adding_extra_to_log.py   |   4 +-
 ...0_fix_mssql_exec_date_rendered_task_instance.py |   8 +-
 .../versions/52d714495f0_job_id_indices.py         |   4 +-
 .../561833c1c74b_add_password_column_to_user.py    |   4 +-
 ...73d9401f_add_description_field_to_connection.py |   2 +-
 ...7aae_fix_description_field_in_connection_to_.py |   8 +-
 .../64de9cddf6c9_add_task_fails_journal_table.py   |   4 +-
 .../849da589634d_prefix_dag_permissions.py         |  30 +-
 ...15af_add_rendered_task_instance_fields_table.py |   6 +-
 .../856955da8476_fix_sqlite_foreign_key.py         |   2 +-
 .../8646922c8a04_change_default_pool_slots_to_1.py |   4 +-
 ...0d1215c0_add_kubernetes_scheduler_uniqueness.py |   4 +-
 ...1e647c8_task_reschedule_fk_on_cascade_delete.py |   4 +-
 .../versions/947454bf1dff_add_ti_job_id_index.py   |   4 +-
 .../versions/952da73b5eff_add_dag_code_table.py    |   2 +-
 .../versions/9635ae0956e7_index_faskfail.py        |   4 +-
 ...606e2_add_scheduling_decision_to_dagrun_and_.py |   4 +-
 ...67d16b_add_pool_slots_field_to_task_instance.py |   4 +-
 .../versions/b0125267960b_merge_heads.py           |   4 +-
 ...cfc896_add_a_column_to_track_the_encryption_.py |   4 +-
 ...13e_add_notification_sent_column_to_sla_miss.py |   4 +-
 ...3e6c56_make_xcom_value_column_a_large_binary.py |   4 +-
 .../bf00311e1990_add_index_to_taskinstance.py      |   4 +-
 .../versions/c8ffec048a3b_add_fields_to_dag.py     |   4 +-
 ...623dc7_add_max_tries_column_to_task_instance.py |   8 +-
 .../versions/cf5dc11e79ad_drop_user_and_chart.py   |   4 +-
 .../d2ae31099d61_increase_text_size_for_mysql.py   |   8 +-
 .../d38e04c12aa2_add_serialized_dag_table.py       |  12 +-
 .../versions/dd25f486b8ea_add_idx_log_dag.py       |   4 +-
 .../dd4ecb8fbee3_add_schedule_interval_to_dag.py   |   4 +-
 .../e38be357a868_update_schema_for_smart_sensor.py |  10 +-
 .../versions/e3a246e0dc1_current_schema.py         |   4 +-
 ...ac86c_change_field_in_dagcode_to_mediumtext_.py |   8 +-
 .../f23433877c24_fix_mysql_not_null_constraint.py  |   4 +-
 .../versions/f2ca10b85618_add_dag_stats_table.py   |   4 +-
 airflow/models/__init__.py                         |   2 +-
 airflow/models/baseoperator.py                     |  55 +-
 airflow/models/connection.py                       |  14 +-
 airflow/models/crypto.py                           |   4 +-
 airflow/models/dag.py                              |   2 +-
 airflow/models/dagbag.py                           |  10 +-
 airflow/models/dagrun.py                           |   2 +-
 airflow/models/errors.py                           |   2 +-
 airflow/models/pool.py                             |   2 +-
 airflow/models/serialized_dag.py                   |   7 +-
 airflow/models/skipmixin.py                        |   2 +-
 airflow/models/taskinstance.py                     |  39 +-
 airflow/models/variable.py                         |   6 +-
 airflow/models/xcom_arg.py                         |   2 +-
 airflow/mypy/plugin/decorators.py                  |   8 +-
 airflow/operators/bash_operator.py                 |   1 -
 airflow/operators/branch_operator.py               |   1 -
 airflow/operators/dagrun_operator.py               |   1 -
 airflow/operators/docker_operator.py               |   1 -
 airflow/operators/druid_check_operator.py          |   1 -
 airflow/operators/dummy_operator.py                |   1 -
 airflow/operators/email.py                         |   6 +-
 airflow/operators/email_operator.py                |   1 -
 airflow/operators/gcs_to_s3.py                     |   1 -
 airflow/operators/hive_operator.py                 |   1 -
 airflow/operators/hive_stats_operator.py           |   1 -
 airflow/operators/hive_to_druid.py                 |   1 -
 airflow/operators/hive_to_mysql.py                 |   1 -
 airflow/operators/hive_to_samba_operator.py        |   1 -
 airflow/operators/http_operator.py                 |   1 -
 airflow/operators/jdbc_operator.py                 |   1 -
 airflow/operators/latest_only_operator.py          |   1 -
 airflow/operators/mssql_operator.py                |   1 -
 airflow/operators/mysql_operator.py                |   1 -
 airflow/operators/oracle_operator.py               |   1 -
 airflow/operators/papermill_operator.py            |   1 -
 airflow/operators/pig_operator.py                  |   1 -
 airflow/operators/postgres_operator.py             |   1 -
 airflow/operators/presto_check_operator.py         |   3 +-
 airflow/operators/presto_to_mysql.py               |   1 -
 airflow/operators/python.py                        |   4 +-
 airflow/operators/python_operator.py               |   1 -
 airflow/operators/s3_file_transform_operator.py    |   1 -
 airflow/operators/slack_operator.py                |   1 -
 airflow/operators/sqlite_operator.py               |   1 -
 airflow/operators/subdag_operator.py               |   1 -
 airflow/plugins_manager.py                         |  31 +-
 .../example_google_api_to_s3_transfer_advanced.py  |   3 +-
 airflow/providers/amazon/aws/hooks/athena.py       |   8 +-
 airflow/providers/amazon/aws/hooks/aws_dynamodb.py |   1 -
 airflow/providers/amazon/aws/hooks/base_aws.py     |   4 +-
 airflow/providers/amazon/aws/hooks/batch_client.py |  12 +-
 airflow/providers/amazon/aws/hooks/glue.py         |   2 +-
 airflow/providers/amazon/aws/hooks/glue_crawler.py |   4 +-
 airflow/providers/amazon/aws/hooks/redshift.py     |   2 +-
 airflow/providers/amazon/aws/hooks/s3.py           |   4 +-
 airflow/providers/amazon/aws/hooks/sagemaker.py    |  10 +-
 airflow/providers/amazon/aws/hooks/ses.py          |   2 +-
 .../amazon/aws/log/cloudwatch_task_handler.py      |   4 +-
 .../providers/amazon/aws/log/s3_task_handler.py    |  10 +-
 airflow/providers/amazon/aws/operators/athena.py   |   4 +-
 airflow/providers/amazon/aws/operators/batch.py    |   6 +-
 airflow/providers/amazon/aws/operators/datasync.py |   1 -
 airflow/providers/amazon/aws/operators/ecs.py      |   7 +-
 airflow/providers/amazon/aws/operators/glue.py     |   2 +-
 .../amazon/aws/operators/s3_file_transform.py      |   2 +-
 .../amazon/aws/operators/sagemaker_base.py         |   4 +-
 .../providers/amazon/aws/sensors/sagemaker_base.py |   2 +-
 .../amazon/aws/transfers/dynamodb_to_s3.py         |   4 +-
 .../providers/amazon/aws/transfers/exasol_to_s3.py |   2 +-
 .../providers/amazon/aws/transfers/gcs_to_s3.py    |   2 +-
 .../amazon/aws/transfers/hive_to_dynamodb.py       |   2 +-
 .../providers/amazon/aws/transfers/mongo_to_s3.py  |   1 -
 .../amazon/aws/transfers/redshift_to_s3.py         |   2 +-
 airflow/providers/apache/beam/hooks/beam.py        |   4 +-
 airflow/providers/apache/beam/operators/beam.py    |  15 +-
 .../providers/apache/cassandra/hooks/cassandra.py  |   2 +-
 .../apache/druid/transfers/hive_to_druid.py        |   2 +-
 airflow/providers/apache/hdfs/hooks/hdfs.py        |   2 +-
 airflow/providers/apache/hdfs/hooks/webhdfs.py     |   2 +-
 airflow/providers/apache/hdfs/sensors/hdfs.py      |   2 +-
 airflow/providers/apache/hive/hooks/hive.py        |  10 +-
 airflow/providers/apache/hive/operators/hive.py    |   1 -
 .../apache/hive/transfers/mssql_to_hive.py         |   3 +-
 .../apache/hive/transfers/mysql_to_hive.py         |   2 +-
 .../providers/apache/hive/transfers/s3_to_hive.py  |   4 +-
 .../providers/apache/kylin/operators/kylin_cube.py |   1 -
 airflow/providers/apache/livy/hooks/livy.py        |   3 -
 airflow/providers/apache/livy/operators/livy.py    |   1 -
 airflow/providers/apache/pinot/hooks/pinot.py      |   4 +-
 airflow/providers/apache/spark/hooks/spark_jdbc.py |   2 -
 .../apache/spark/hooks/spark_jdbc_script.py        |   2 -
 airflow/providers/apache/spark/hooks/spark_sql.py  |   3 +-
 .../providers/apache/spark/hooks/spark_submit.py   |   3 -
 .../providers/apache/spark/operators/spark_jdbc.py |   2 -
 .../providers/apache/spark/operators/spark_sql.py  |   1 -
 .../apache/spark/operators/spark_submit.py         |   2 -
 airflow/providers/apache/sqoop/hooks/sqoop.py      |   3 -
 airflow/providers/apache/sqoop/operators/sqoop.py  |   2 -
 airflow/providers/asana/hooks/asana.py             |  16 +-
 .../backcompat/backwards_compat_converters.py      |   2 +-
 .../cncf/kubernetes/operators/kubernetes_pod.py    |   4 +-
 airflow/providers/databricks/hooks/databricks.py   |   2 +-
 .../providers/databricks/operators/databricks.py   |   2 -
 airflow/providers/datadog/hooks/datadog.py         |   1 -
 .../example_dags/example_docker_copy_data.py       |   2 +-
 airflow/providers/docker/operators/docker.py       |   6 +-
 .../providers/elasticsearch/log/es_task_handler.py |  14 +-
 airflow/providers/ftp/hooks/ftp.py                 |   4 +-
 .../cloud/example_dags/example_automl_tables.py    |  28 +-
 .../cloud/example_dags/example_bigquery_dts.py     |   8 +-
 .../google/cloud/example_dags/example_bigtable.py  |   4 +-
 .../cloud/example_dags/example_cloud_build.py      |   4 +-
 .../google/cloud/example_dags/example_cloud_sql.py |  44 +-
 .../google/cloud/example_dags/example_tasks.py     |   2 +-
 .../google/cloud/example_dags/example_vision.py    |  10 +-
 airflow/providers/google/cloud/hooks/automl.py     |   2 +-
 airflow/providers/google/cloud/hooks/bigquery.py   |  39 +-
 airflow/providers/google/cloud/hooks/bigtable.py   |   1 -
 .../providers/google/cloud/hooks/cloud_build.py    |   9 +-
 airflow/providers/google/cloud/hooks/cloud_sql.py  |  37 +-
 .../cloud/hooks/cloud_storage_transfer_service.py  |  35 +-
 airflow/providers/google/cloud/hooks/compute.py    |  21 +-
 .../providers/google/cloud/hooks/compute_ssh.py    |  28 +-
 .../providers/google/cloud/hooks/datacatalog.py    |   4 +-
 airflow/providers/google/cloud/hooks/dataflow.py   |  19 +-
 airflow/providers/google/cloud/hooks/datafusion.py |  17 +-
 airflow/providers/google/cloud/hooks/dataproc.py   |   6 +-
 airflow/providers/google/cloud/hooks/datastore.py  |  46 +-
 airflow/providers/google/cloud/hooks/dlp.py        |   1 -
 airflow/providers/google/cloud/hooks/functions.py  |  14 +-
 airflow/providers/google/cloud/hooks/gcs.py        |  14 +-
 airflow/providers/google/cloud/hooks/gdm.py        |  12 +-
 .../google/cloud/hooks/kubernetes_engine.py        |   2 +-
 .../providers/google/cloud/hooks/life_sciences.py  |   9 +-
 airflow/providers/google/cloud/hooks/mlengine.py   |  27 +-
 airflow/providers/google/cloud/hooks/pubsub.py     |  19 +-
 .../providers/google/cloud/hooks/secret_manager.py |   2 +-
 airflow/providers/google/cloud/hooks/spanner.py    |   4 +-
 .../providers/google/cloud/hooks/text_to_speech.py |   6 +-
 airflow/providers/google/cloud/hooks/vision.py     |  17 +-
 airflow/providers/google/cloud/hooks/workflows.py  |   4 -
 .../providers/google/cloud/log/gcs_task_handler.py |  11 +-
 .../google/cloud/log/stackdriver_task_handler.py   |   6 +-
 airflow/providers/google/cloud/operators/automl.py |   6 +-
 .../providers/google/cloud/operators/bigquery.py   |   8 +-
 .../providers/google/cloud/operators/bigtable.py   |   2 +-
 .../operators/cloud_storage_transfer_service.py    |   4 +-
 .../google/cloud/operators/datacatalog.py          |   8 +-
 .../providers/google/cloud/operators/dataflow.py   |  36 +-
 .../providers/google/cloud/operators/datafusion.py |   2 +-
 .../providers/google/cloud/operators/dataproc.py   |  13 +-
 .../providers/google/cloud/operators/datastore.py  |   2 +-
 airflow/providers/google/cloud/operators/dlp.py    |   2 +-
 .../providers/google/cloud/operators/mlengine.py   |   6 +-
 airflow/providers/google/cloud/operators/pubsub.py |   5 +-
 .../google/cloud/operators/stackdriver.py          |   2 -
 airflow/providers/google/cloud/operators/tasks.py  |   2 +-
 .../providers/google/cloud/operators/workflows.py  |   4 -
 .../google/cloud/secrets/secret_manager.py         |   2 +-
 airflow/providers/google/cloud/sensors/dataproc.py |   2 +-
 airflow/providers/google/cloud/sensors/gcs.py      |   4 +-
 airflow/providers/google/cloud/sensors/pubsub.py   |   2 +-
 .../cloud/transfers/azure_fileshare_to_gcs.py      |   1 -
 .../google/cloud/transfers/bigquery_to_bigquery.py |   2 +-
 .../google/cloud/transfers/bigquery_to_gcs.py      |   2 +-
 .../google/cloud/transfers/bigquery_to_mysql.py    |   2 +-
 .../google/cloud/transfers/cassandra_to_gcs.py     |   8 +-
 .../google/cloud/transfers/gcs_to_bigquery.py      |   2 -
 .../providers/google/cloud/transfers/gcs_to_gcs.py |   2 +-
 .../google/cloud/transfers/gcs_to_local.py         |   4 +-
 .../google/cloud/transfers/gcs_to_sftp.py          |   1 -
 .../google/cloud/transfers/oracle_to_gcs.py        |   2 +-
 .../providers/google/cloud/transfers/s3_to_gcs.py  |   2 -
 .../providers/google/cloud/transfers/sql_to_gcs.py |   7 +-
 .../google/cloud/utils/credentials_provider.py     |  13 +-
 .../google/cloud/utils/field_sanitizer.py          |   2 +-
 .../google/cloud/utils/mlengine_operator_utils.py  |   4 +-
 .../cloud/utils/mlengine_prediction_summary.py     |   7 +-
 .../google/common/auth_backend/google_openid.py    |   2 +-
 .../providers/google/common/hooks/base_google.py   |  18 +-
 .../providers/google/firebase/hooks/firestore.py   |   8 +-
 .../google/marketing_platform/hooks/analytics.py   |  14 +-
 .../marketing_platform/hooks/campaign_manager.py   |  24 +-
 .../marketing_platform/hooks/display_video.py      |  44 +-
 .../google/marketing_platform/hooks/search_ads.py  |  16 +-
 .../operators/campaign_manager.py                  |   2 +-
 airflow/providers/google/suite/hooks/drive.py      |  12 +-
 airflow/providers/google/suite/hooks/sheets.py     |  18 +-
 airflow/providers/grpc/hooks/grpc.py               |   4 +-
 .../hashicorp/_internal_client/vault_client.py     |   6 +-
 airflow/providers/hashicorp/hooks/vault.py         |   4 +-
 airflow/providers/hashicorp/secrets/vault.py       |   5 +-
 .../jenkins/operators/jenkins_job_trigger.py       |   4 +-
 airflow/providers/jira/sensors/jira.py             |   4 +-
 .../providers/microsoft/azure/hooks/azure_batch.py |   2 +-
 .../microsoft/azure/hooks/azure_data_factory.py    |   2 +-
 airflow/providers/microsoft/azure/hooks/wasb.py    |   2 +-
 .../microsoft/azure/log/wasb_task_handler.py       |   2 +-
 .../microsoft/azure/operators/azure_batch.py       |   3 +-
 .../azure/operators/azure_container_instances.py   |   7 +-
 .../azure/transfers/oracle_to_azure_data_lake.py   |   1 -
 airflow/providers/microsoft/mssql/hooks/mssql.py   |  16 +-
 airflow/providers/microsoft/winrm/hooks/winrm.py   |   3 +-
 .../providers/microsoft/winrm/operators/winrm.py   |   3 +-
 airflow/providers/mysql/hooks/mysql.py             |  14 +-
 .../providers/mysql/transfers/vertica_to_mysql.py  |   2 +-
 airflow/providers/odbc/hooks/odbc.py               |   1 -
 .../providers/opsgenie/operators/opsgenie_alert.py |   1 -
 airflow/providers/oracle/hooks/oracle.py           |   5 +-
 .../providers/oracle/transfers/oracle_to_oracle.py |   1 -
 airflow/providers/pagerduty/hooks/pagerduty.py     |   1 -
 airflow/providers/postgres/hooks/postgres.py       |   1 -
 airflow/providers/presto/hooks/presto.py           |  10 +-
 airflow/providers/qubole/hooks/qubole.py           |   5 +-
 airflow/providers/qubole/hooks/qubole_check.py     |   2 +-
 airflow/providers/qubole/operators/qubole_check.py |   4 +-
 airflow/providers/qubole/sensors/qubole.py         |   6 +-
 airflow/providers/salesforce/hooks/tableau.py      |   1 -
 .../operators/tableau_refresh_workbook.py          |   1 -
 .../salesforce/sensors/tableau_job_status.py       |   1 -
 .../segment/operators/segment_track_event.py       |   1 -
 airflow/providers/sendgrid/utils/emailer.py        |   2 +-
 .../providers/singularity/operators/singularity.py |   2 +-
 airflow/providers/slack/hooks/slack.py             |   4 +-
 airflow/providers/slack/hooks/slack_webhook.py     |   1 -
 airflow/providers/slack/operators/slack.py         |   4 +-
 airflow/providers/slack/operators/slack_webhook.py |   1 -
 airflow/providers/snowflake/hooks/snowflake.py     |  12 +-
 .../snowflake/transfers/snowflake_to_slack.py      |   2 +-
 airflow/providers/ssh/hooks/ssh.py                 |   4 +-
 airflow/providers/trino/hooks/trino.py             |  10 +-
 airflow/providers/vertica/hooks/vertica.py         |   2 +-
 .../yandex/operators/yandexcloud_dataproc.py       |   7 -
 airflow/providers/zendesk/hooks/zendesk.py         |   1 -
 airflow/providers_manager.py                       |  20 +-
 airflow/secrets/base_secrets.py                    |   2 +-
 airflow/secrets/environment_variables.py           |   1 -
 airflow/secrets/metastore.py                       |   2 -
 airflow/security/kerberos.py                       |   2 +-
 airflow/security/utils.py                          |  10 +-
 airflow/sensors/base.py                            |   2 +-
 airflow/sensors/base_sensor_operator.py            |   1 -
 airflow/sensors/bash.py                            |   2 +-
 airflow/sensors/date_time_sensor.py                |   1 -
 airflow/sensors/external_task.py                   |   4 +-
 airflow/sensors/external_task_sensor.py            |   1 -
 airflow/sensors/hdfs_sensor.py                     |   1 -
 airflow/sensors/hive_partition_sensor.py           |   1 -
 airflow/sensors/http_sensor.py                     |   1 -
 airflow/sensors/metastore_partition_sensor.py      |   1 -
 airflow/sensors/named_hive_partition_sensor.py     |   1 -
 airflow/sensors/s3_key_sensor.py                   |   1 -
 airflow/sensors/s3_prefix_sensor.py                |   1 -
 airflow/sensors/smart_sensor.py                    |  18 +-
 airflow/sensors/sql_sensor.py                      |   1 -
 airflow/sensors/time_delta_sensor.py               |   1 -
 airflow/sensors/web_hdfs_sensor.py                 |   1 -
 airflow/sentry.py                                  |   2 +-
 airflow/serialization/json_schema.py               |   3 +-
 airflow/serialization/serialized_objects.py        |  23 +-
 airflow/settings.py                                |  25 +-
 airflow/stats.py                                   |  12 +-
 airflow/task/task_runner/__init__.py               |   2 +-
 airflow/task/task_runner/base_task_runner.py       |   3 +-
 airflow/task/task_runner/standard_task_runner.py   |   8 +-
 airflow/ti_deps/deps/not_previously_skipped_dep.py |   2 +-
 airflow/ti_deps/deps/task_not_running_dep.py       |   2 +-
 airflow/ti_deps/deps/trigger_rule_dep.py           |   2 +-
 airflow/typing_compat.py                           |   9 +-
 airflow/utils/cli.py                               |  10 +-
 airflow/utils/cli_action_loggers.py                |   6 +-
 airflow/utils/dag_processing.py                    |  14 +-
 airflow/utils/dates.py                             |   3 +-
 airflow/utils/db.py                                |  19 +-
 airflow/utils/decorators.py                        |   2 +-
 airflow/utils/edgemodifier.py                      |   2 +-
 airflow/utils/event_scheduler.py                   |   2 +-
 airflow/utils/file.py                              |   8 +-
 airflow/utils/helpers.py                           |   6 +-
 airflow/utils/log/cloudwatch_task_handler.py       |   1 -
 airflow/utils/log/es_task_handler.py               |   1 -
 airflow/utils/log/file_processor_handler.py        |   2 +-
 airflow/utils/log/file_task_handler.py             |  10 +-
 airflow/utils/log/gcs_task_handler.py              |   1 -
 airflow/utils/log/json_formatter.py                |   1 -
 airflow/utils/log/logging_mixin.py                 |   9 +-
 airflow/utils/log/s3_task_handler.py               |   1 -
 airflow/utils/log/secrets_masker.py                |   4 +-
 airflow/utils/log/stackdriver_task_handler.py      |   1 -
 .../log/task_handler_with_custom_formatter.py      |   2 +-
 airflow/utils/log/wasb_task_handler.py             |   1 -
 airflow/utils/orm_event_handlers.py                |   4 +-
 airflow/utils/platform.py                          |   2 +-
 airflow/utils/process_utils.py                     |   4 +-
 airflow/utils/session.py                           |   2 +-
 airflow/utils/sqlalchemy.py                        |   1 -
 airflow/utils/task_group.py                        |   2 +-
 airflow/utils/timeout.py                           |   4 +-
 airflow/utils/types.py                             |   2 +-
 airflow/utils/weekday.py                           |   1 -
 airflow/www/api/experimental/endpoints.py          |   2 +-
 airflow/www/app.py                                 |   4 +-
 airflow/www/auth.py                                |   2 +-
 airflow/www/decorators.py                          |   4 +-
 airflow/www/extensions/init_jinja_globals.py       |   2 +-
 airflow/www/extensions/init_manifest_files.py      |   4 +-
 airflow/www/forms.py                               |   2 +-
 airflow/www/gunicorn_config.py                     |   6 +-
 airflow/www/security.py                            |   8 +-
 airflow/www/utils.py                               |  57 +-
 airflow/www/views.py                               | 204 +++----
 breeze                                             |   8 +-
 breeze-complete                                    |   5 +-
 chart/tests/helm_template_generator.py             |   4 +-
 chart/tests/test_basic_helm_chart.py               |   2 +-
 dev/import_all_classes.py                          |   2 +-
 dev/provider_packages/prepare_provider_packages.py |  14 +-
 dev/retag_docker_images.py                         |   2 +-
 dev/send_email.py                                  |   4 +-
 docs/build_docs.py                                 |  24 +-
 docs/conf.py                                       |   7 +-
 docs/exts/__init__.py                              |   1 -
 docs/exts/airflow_intersphinx.py                   |   4 +-
 docs/exts/docroles.py                              |   8 +-
 docs/exts/docs_build/__init__.py                   |   1 -
 docs/exts/docs_build/dev_index_generator.py        |   2 +-
 docs/exts/docs_build/docs_builder.py               |   8 +-
 docs/exts/docs_build/errors.py                     |   2 +-
 docs/exts/docs_build/fetch_inventories.py          |   8 +-
 docs/exts/docs_build/lint_checks.py                |   4 +-
 docs/exts/docs_build/spelling_checks.py            |   2 +-
 docs/exts/exampleinclude.py                        |  16 +-
 docs/exts/operators_and_hooks_ref.py               |   5 +-
 docs/exts/providers_packages_ref.py                |   2 +-
 docs/exts/removemarktransform.py                   |   5 +-
 docs/exts/substitution_extensions.py               |   2 +-
 docs/publish_docs.py                               |   3 -
 docs/spelling_wordlist.txt                         |   2 -
 kubernetes_tests/test_kubernetes_pod_operator.py   |  18 +-
 .../test_kubernetes_pod_operator_backcompat.py     |   8 +-
 pylintrc                                           | 597 --------------------
 pylintrc-tests                                     | 606 ---------------------
 scripts/ci/docker-compose/local.yml                |   2 -
 scripts/ci/libraries/_all_libs.sh                  |   2 -
 scripts/ci/libraries/_build_images.sh              |   2 +-
 scripts/ci/libraries/_local_mounts.sh              |   2 -
 scripts/ci/libraries/_pylint.sh                    |  37 --
 .../pre_commit_check_extras_have_providers.py      |   2 +-
 .../pre_commit_check_setup_extra_packages_ref.py   |   2 +-
 scripts/ci/pre_commit/pre_commit_insert_extras.py  |   2 +-
 scripts/ci/pre_commit/pre_commit_pylint.sh         |  26 -
 scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py    |   2 +-
 scripts/ci/pylint_todo.txt                         |   5 -
 scripts/ci/static_checks/pylint.sh                 |  56 --
 scripts/ci/static_checks/refresh_pylint_todo.sh    |  33 --
 scripts/in_container/_in_container_utils.sh        |  57 --
 scripts/in_container/refresh_pylint_todo.sh        |  21 -
 scripts/in_container/run_pylint.sh                 |  49 --
 .../in_container/update_quarantined_test_status.py |   4 +-
 scripts/tools/generate-integrations-json.py        |   3 -
 scripts/tools/list-integrations.py                 |   2 +-
 setup.py                                           |  23 +-
 tests/api/auth/backend/test_basic_auth.py          |   2 +-
 tests/api/common/experimental/test_mark_tasks.py   |   2 +-
 tests/api_connexion/endpoints/test_dag_endpoint.py |   4 +-
 .../endpoints/test_dag_run_endpoint.py             |   2 +-
 .../endpoints/test_extra_link_endpoint.py          |   4 +-
 .../endpoints/test_import_error_endpoint.py        |   2 +-
 tests/api_connexion/endpoints/test_log_endpoint.py |   6 +-
 .../api_connexion/endpoints/test_task_endpoint.py  |   2 +-
 .../endpoints/test_task_instance_endpoint.py       |  10 +-
 .../api_connexion/endpoints/test_xcom_endpoint.py  |   2 +-
 tests/api_connexion/schemas/test_error_schema.py   |   2 +-
 tests/api_connexion/test_basic_auth.py             |   2 +-
 tests/build_provider_packages_dependencies.py      |   6 +-
 tests/cli/commands/test_celery_command.py          |   2 +-
 tests/cli/commands/test_info_command.py            |   2 +-
 tests/cli/commands/test_role_command.py            |   2 +-
 tests/cli/commands/test_user_command.py            |   2 +-
 tests/cli/commands/test_webserver_command.py       |  59 +-
 tests/conftest.py                                  |   6 +-
 tests/core/test_core.py                            |   8 +-
 tests/core/test_logging_config.py                  |   7 +-
 tests/core/test_settings.py                        |   8 +-
 tests/dags/subdir1/test_ignore_this.py             |   2 +-
 tests/dags/test_mark_success.py                    |   4 +-
 tests/dags/test_on_failure_callback.py             |   2 +-
 tests/dags/test_subdag.py                          |   2 +-
 tests/decorators/test_python.py                    |  57 +-
 tests/decorators/test_python_virtualenv.py         |  28 +-
 tests/executors/test_celery_executor.py            |  11 +-
 tests/executors/test_kubernetes_executor.py        |  15 +-
 tests/executors/test_local_executor.py             |  10 +-
 tests/hooks/test_subprocess.py                     |   2 +-
 tests/jobs/test_backfill_job.py                    |   1 -
 tests/jobs/test_local_task_job.py                  |   8 +-
 tests/jobs/test_scheduler_job.py                   |  76 +--
 tests/models/test_baseoperator.py                  |  14 +-
 tests/models/test_connection.py                    |   5 +-
 tests/models/test_dag.py                           |  13 +-
 tests/models/test_dagbag.py                        |  10 +-
 tests/models/test_dagparam.py                      |   3 -
 tests/models/test_pool.py                          |  16 +-
 tests/models/test_taskinstance.py                  |  10 +-
 tests/models/test_xcom.py                          |   4 +-
 tests/operators/test_email.py                      |   2 +-
 tests/operators/test_python.py                     |  28 +-
 tests/operators/test_sql.py                        |   1 -
 tests/operators/test_weekday.py                    |   2 +-
 tests/plugins/test_plugins_manager.py              |   4 +-
 tests/providers/amazon/aws/hooks/conftest.py       |   1 -
 tests/providers/amazon/aws/hooks/test_base_aws.py  |   8 +-
 .../amazon/aws/hooks/test_batch_client.py          |   1 -
 .../amazon/aws/hooks/test_batch_waiters.py         |   2 -
 tests/providers/amazon/aws/hooks/test_s3.py        |  18 +-
 .../amazon/aws/log/test_s3_task_handler.py         |  20 +-
 .../providers/amazon/aws/operators/test_athena.py  |   4 -
 tests/providers/amazon/aws/operators/test_batch.py |   1 -
 tests/providers/amazon/aws/operators/test_ecs.py   |  14 +-
 .../amazon/aws/operators/test_glacier_system.py    |   2 +-
 .../aws/operators/test_sagemaker_training.py       |   1 -
 tests/providers/amazon/aws/sensors/test_s3_key.py  |   2 +-
 .../amazon/aws/transfers/test_dynamodb_to_s3.py    |   2 +-
 tests/providers/apache/beam/hooks/test_beam.py     |  12 +-
 .../apache/cassandra/hooks/test_cassandra.py       |   2 +-
 tests/providers/apache/druid/hooks/test_druid.py   |   2 +-
 tests/providers/apache/hive/hooks/test_hive.py     |   2 +-
 .../apache/hive/transfers/test_mssql_to_hive.py    |   8 +-
 tests/providers/apache/livy/hooks/test_livy.py     |  18 +-
 tests/providers/apache/pinot/hooks/test_pinot.py   |   2 +-
 tests/providers/asana/hooks/test_asana.py          |  38 +-
 .../kubernetes/operators/test_kubernetes_pod.py    |   1 -
 tests/providers/docker/hooks/test_docker.py        |   8 +-
 tests/providers/docker/operators/test_docker.py    |   2 +-
 .../elasticsearch/hooks/test_elasticsearch.py      |   2 +-
 .../elasticsearch/log/elasticmock/__init__.py      |   2 +-
 .../log/elasticmock/fake_elasticsearch.py          |   6 +-
 .../elasticsearch/log/test_es_task_handler.py      |   6 +-
 .../_internal_client/test_secret_manager_client.py |   3 +-
 tests/providers/google/cloud/hooks/test_automl.py  |   2 +-
 .../providers/google/cloud/hooks/test_bigquery.py  |   5 +-
 .../google/cloud/hooks/test_cloud_build.py         |  10 +-
 .../google/cloud/hooks/test_cloud_memorystore.py   |  26 +-
 .../providers/google/cloud/hooks/test_cloud_sql.py |  63 +--
 .../hooks/test_cloud_storage_transfer_service.py   |  19 +-
 tests/providers/google/cloud/hooks/test_compute.py |   1 -
 .../google/cloud/hooks/test_datacatalog.py         |  84 +--
 .../providers/google/cloud/hooks/test_dataflow.py  |  34 +-
 .../google/cloud/hooks/test_datafusion.py          |   2 -
 .../providers/google/cloud/hooks/test_dataprep.py  |  18 +-
 .../providers/google/cloud/hooks/test_dataproc.py  |   2 +-
 .../providers/google/cloud/hooks/test_datastore.py |  20 +-
 tests/providers/google/cloud/hooks/test_dlp.py     |  30 +-
 .../providers/google/cloud/hooks/test_functions.py |   8 +-
 tests/providers/google/cloud/hooks/test_gcs.py     |  18 +-
 tests/providers/google/cloud/hooks/test_gdm.py     |   2 +-
 tests/providers/google/cloud/hooks/test_kms.py     |   2 +-
 .../google/cloud/hooks/test_kubernetes_engine.py   |   4 +-
 .../google/cloud/hooks/test_life_sciences.py       |  11 +-
 tests/providers/google/cloud/hooks/test_pubsub.py  |   6 +-
 .../google/cloud/hooks/test_secret_manager.py      |   1 -
 .../google/cloud/hooks/test_stackdriver.py         |  10 +-
 .../providers/google/cloud/hooks/test_workflows.py |   2 +-
 .../google/cloud/operators/test_cloud_sql.py       |   1 -
 .../test_cloud_storage_transfer_service.py         |  30 +-
 .../google/cloud/operators/test_compute.py         |   7 +-
 .../google/cloud/operators/test_dataprep_system.py |   2 +-
 .../google/cloud/operators/test_dataproc.py        |   4 +-
 tests/providers/google/cloud/operators/test_dlp.py |   2 +-
 .../google/cloud/operators/test_dlp_system.py      |   2 +-
 .../cloud/operators/test_kubernetes_engine.py      |   7 -
 .../google/cloud/operators/test_spanner.py         |  14 +-
 .../google/cloud/operators/test_speech_to_text.py  |   6 +-
 .../test_azure_fileshare_to_gcs_system.py          |   2 +-
 .../google/cloud/transfers/test_gcs_to_sftp.py     |   1 -
 .../google/cloud/transfers/test_mssql_to_gcs.py    |   2 +-
 .../google/cloud/transfers/test_mysql_to_gcs.py    |   6 +-
 .../google/cloud/transfers/test_oracle_to_gcs.py   |   4 +-
 .../google/cloud/transfers/test_postgres_to_gcs.py |   2 +-
 .../google/cloud/transfers/test_presto_to_gcs.py   |   4 +-
 .../google/cloud/transfers/test_sftp_to_gcs.py     |   1 -
 .../google/cloud/transfers/test_trino_to_gcs.py    |   4 +-
 .../google/cloud/utils/gcp_authenticator.py        |   2 +-
 .../common/auth_backend/test_google_openid.py      |   2 +-
 .../google/common/hooks/test_base_google.py        |   4 +-
 tests/providers/grpc/hooks/test_grpc.py            |   6 +-
 .../_internal_client/test_vault_client.py          |   3 +-
 tests/providers/hashicorp/hooks/test_vault.py      |   1 -
 tests/providers/imap/hooks/test_imap.py            |   2 +-
 tests/providers/jdbc/hooks/test_jdbc.py            |   2 +-
 tests/providers/jira/hooks/test_jira.py            |   2 +-
 tests/providers/jira/sensors/test_jira.py          |   2 +-
 .../azure/hooks/test_azure_data_factory.py         |   1 -
 .../microsoft/azure/operators/test_azure_batch.py  |   2 +-
 tests/providers/mysql/operators/test_mysql.py      |   2 +-
 tests/providers/odbc/hooks/test_odbc.py            |   1 -
 tests/providers/oracle/hooks/test_oracle.py        |   1 -
 tests/providers/postgres/hooks/test_postgres.py    |   2 +-
 tests/providers/qubole/hooks/test_qubole.py        |   1 -
 .../qubole/operators/test_qubole_check.py          |   1 -
 tests/providers/sftp/operators/test_sftp.py        |   6 +-
 tests/providers/snowflake/hooks/test_snowflake.py  |   2 +-
 tests/providers/sqlite/hooks/test_sqlite.py        |   2 +-
 tests/providers/ssh/hooks/test_ssh.py              |   6 +-
 tests/providers/ssh/operators/test_ssh.py          |   8 +-
 tests/providers/tableau/hooks/test_tableau.py      |   2 +-
 tests/security/test_kerberos.py                    |   8 +-
 tests/sensors/test_external_task_sensor.py         |   4 +-
 tests/serialization/test_dag_serialization.py      |   4 +-
 .../task/task_runner/test_standard_task_runner.py  |   2 +-
 tests/test_utils/asserts.py                        |   2 +-
 tests/test_utils/fake_datetime.py                  |   2 +-
 tests/test_utils/hdfs_utils.py                     |   2 +-
 tests/test_utils/perf/dags/elastic_dag.py          |   1 -
 tests/test_utils/perf/perf_kit/memory.py           |   2 +-
 tests/test_utils/perf/perf_kit/sqlalchemy.py       |  36 +-
 .../perf/scheduler_dag_execution_timing.py         |  10 +-
 tests/test_utils/remote_user_api_auth_backend.py   |   2 +-
 tests/test_utils/reset_warning_registry.py         |   6 +-
 tests/ti_deps/deps/fake_models.py                  |   8 +-
 .../deps/test_dag_ti_slots_available_dep.py        |   2 +-
 tests/ti_deps/deps/test_dag_unpaused_dep.py        |   2 +-
 tests/ti_deps/deps/test_dagrun_exists_dep.py       |   2 +-
 tests/ti_deps/deps/test_dagrun_id_dep.py           |   2 +-
 tests/ti_deps/deps/test_not_in_retry_period_dep.py |   2 +-
 .../ti_deps/deps/test_pool_slots_available_dep.py  |   5 +-
 tests/ti_deps/deps/test_prev_dagrun_dep.py         |   2 +-
 tests/ti_deps/deps/test_ready_to_reschedule_dep.py |   2 +-
 tests/ti_deps/deps/test_runnable_exec_date_dep.py  |   2 +-
 tests/ti_deps/deps/test_task_concurrency.py        |   2 +-
 tests/ti_deps/deps/test_task_not_running_dep.py    |   2 +-
 tests/ti_deps/deps/test_trigger_rule_dep.py        |   2 +-
 tests/ti_deps/deps/test_valid_state_dep.py         |   2 +-
 tests/utils/log/test_log_reader.py                 |   2 +-
 tests/utils/test_dag_processing.py                 |   2 +-
 tests/utils/test_edgemodifier.py                   |  10 +-
 tests/utils/test_module_loading.py                 |   2 +-
 tests/utils/test_retries.py                        |   4 +-
 tests/utils/test_task_group.py                     |   2 -
 tests/www/api/experimental/test_endpoints.py       |   2 +-
 tests/www/test_security.py                         |   6 +-
 tests/www/views/conftest.py                        |   4 +-
 tests/www/views/test_views_acl.py                  |  22 +-
 tests/www/views/test_views_extra_links.py          |   4 +-
 tests/www/views/test_views_log.py                  |   4 +-
 tests/www/views/test_views_rendered.py             |   2 +-
 tests/www/views/test_views_tasks.py                |   2 +-
 884 files changed, 1901 insertions(+), 4470 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index 6e42e74..d10cfbc 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -51,8 +51,6 @@
 !.rat-excludes
 !.flake8
 !.dockerignore
-!pylintrc
-!pylintrc-tests
 !pytest.ini
 !CHANGELOG.txt
 !LICENSE
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index 7d97c2f..d5f7632 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -203,7 +203,7 @@ firstPRWelcomeComment: >
 
   Here are some useful points:
 
-  - Pay attention to the quality of your code (flake8, pylint and type annotations). Our [pre-commits](
+  - Pay attention to the quality of your code (flake8, mypy and type annotations). Our [pre-commits](
   https://github.com/apache/airflow/blob/main/STATIC_CODE_CHECKS.rst#prerequisites-for-pre-commit-hooks)
   will help you with that.
 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a3144f4..569dc17 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -308,7 +308,7 @@ jobs:
     needs: [build-info, ci-images]
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-      SKIP: "pylint,identity"
+      SKIP: "identity"
       MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
@@ -338,20 +338,20 @@ jobs:
 ${{ hashFiles('setup.py', 'setup.cfg') }}"
           restore-keys: "\
 pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
-      - name: "Cache pre-commit envs: no-pylint"
+      - name: "Cache pre-commit envs"
         uses: actions/cache@v2
         with:
           path: ~/.cache/pre-commit
-          key: "pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}-\
+          key: "pre-commit-${{steps.host-python-version.outputs.host-python-version}}-\
 ${{ hashFiles('.pre-commit-config.yaml') }}"
-          restore-keys: pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}
+          restore-keys: pre-commit-${{steps.host-python-version.outputs.host-python-version}}
 
       - name: "Cache eslint"
         uses: actions/cache@v2
         with:
           path: 'airflow/ui/node_modules'
           key: ${{ runner.os }}-ui-node-modules-${{ hashFiles('airflow/ui/**/yarn.lock') }}
-      - name: "Static checks: except pylint"
+      - name: "Static checks"
         run: ./scripts/ci/static_checks/run_static_checks.sh
         env:
           VERBOSE: false
@@ -366,7 +366,7 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     needs: [build-info]
     env:
       RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-      SKIP: "build,mypy,flake8,pylint,bats-in-container-tests,identity"
+      SKIP: "build,mypy,flake8,bats-in-container-tests,identity"
       MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'true'
@@ -398,68 +398,17 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
 ${{ hashFiles('setup.py', 'setup.cfg') }}"
           restore-keys: "\
 pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
-      - name: "Cache pre-commit envs: no-pylint"
+      - name: "Cache pre-commit envs"
         uses: actions/cache@v2
         with:
           path: ~/.cache/pre-commit
-          key: "pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}-\
+          key: "pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}}-\
 ${{ hashFiles('.pre-commit-config.yaml') }}"
-          restore-keys: pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}
+          restore-keys: pre-commit-basic-${{steps.host-python-version.outputs.host-python-version}}
       - name: "Static checks: basic checks only"
         run: ./scripts/ci/static_checks/run_basic_static_checks.sh "${{ github.sha }}"
         env:
           VERBOSE: false
-
-  static-checks-pylint:
-    timeout-minutes: 60
-    name: "Pylint"
-    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-    needs: [build-info, ci-images]
-    if: needs.build-info.outputs.basic-checks-only == 'false'
-    env:
-      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
-      # We want to make sure we have latest sources as only in_container scripts are added
-      # to the image but we want to static-check all of them
-      MOUNT_SELECTED_LOCAL_SOURCES: "true"
-      PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
-      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-      - name: "Setup python"
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Get Python version"
-        run: "echo \"::set-output name=host-python-version::$(python -c
- 'import platform; print(platform.python_version())')\""
-        id: host-python-version
-      - name: "Cache pre-commit local-installation"
-        uses: actions/cache@v2
-        with:
-          path: ~/.local
-          key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\
-${{ hashFiles('setup.py', 'setup.cfg') }}"
-          restore-keys: "\
-pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
-      - name: "Cache pre-commit envs - pylint"
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/pre-commit
-          key: "pre-commit-pylint-${{steps.host-python-version.outputs.host-python-version}}-\
-${{ hashFiles('.pre-commit-config.yaml') }}"
-          restore-keys: pre-commit-pylint-${{steps.host-python-version.outputs.host-python-version}}
-      - name: "Static checks: pylint"
-        run: ./scripts/ci/static_checks/run_static_checks.sh pylint
-        env:
-          VERBOSE: false
-
   docs:
     timeout-minutes: 45
     name: "Build docs"
@@ -1042,7 +991,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     needs:
       - build-info
       - static-checks
-      - static-checks-pylint
       - tests-sqlite
       - tests-postgres
       - tests-mysql
@@ -1105,7 +1053,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
     needs:
       - build-info
       - static-checks
-      - static-checks-pylint
       - tests-sqlite
       - tests-postgres
       - tests-mysql
@@ -1152,7 +1099,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - ci-images
       - prod-images
       - static-checks
-      - static-checks-pylint
       - tests-sqlite
       - tests-mysql
       - tests-postgres
@@ -1221,7 +1167,6 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
       - docs
       - build-info
       - static-checks
-      - static-checks-pylint
       - tests-sqlite
       - tests-postgres
       - tests-mysql
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f27aeb0..88d98a7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -218,7 +218,7 @@ repos:
         name: Run pydocstyle
         args:
           - --convention=pep257
-          - --add-ignore=D100,D102,D104,D105,D107,D205,D400,D401
+          - --add-ignore=D100,D102,D103,D104,D105,D107,D205,D400,D401
         exclude: |
           (?x)
           ^tests/.*\.py$|
@@ -229,6 +229,14 @@ repos:
           .*example_dags/.*|
           ^chart/.*\.py$|
           ^airflow/_vendor/
+        additional_dependencies: ['toml']
+  - repo: https://github.com/asottile/yesqa
+    rev: v1.2.3
+    hooks:
+      - id: yesqa
+        exclude: |
+          (?x)
+          ^airflow/_vendor/
   - repo: local
     hooks:
       - id: lint-openapi
@@ -339,14 +347,7 @@ repos:
           ^docs/apache-airflow-providers-apache-cassandra/connections/cassandra\.rst$|
           ^docs/apache-airflow-providers-apache-hive/commits\.rst$|
           git|
-          ^pylintrc |
           ^CHANGELOG\.txt$
-      - id: consistent-pylint
-        language: pygrep
-        name: Check for inconsistent pylint disable/enable without space
-        entry: "pylint:disable|pylint:enable"
-        pass_filenames: true
-        files: \.py$
       - id: base-operator
         language: pygrep
         name: Check BaseOperator[Link] core imports
@@ -609,11 +610,19 @@ repos:
         additional_dependencies: ['flynt==0.63']
         files: \.py$
         exclude: ^airflow/_vendor/
-      - id: bats-in-container-tests
-        name: Run in container bats tests
-        language: system
-        entry: ./scripts/ci/pre_commit/pre_commit_in_container_bats_test.sh
-        files: ^tests/bats/in_container/.*\.bats$|^scripts/in_container/.*sh
+      - id: ui-lint
+        name: ESLint against airflow/ui
+        language: node
+        'types_or': [javascript, tsx, ts]
+        files: ^airflow/ui/
+        entry: scripts/ci/static_checks/ui_lint.sh
+        pass_filenames: false
+      - id: www-lint
+        name: ESLint against current UI js files
+        language: node
+        'types_or': [javascript]
+        files: ^airflow/www/static/js/
+        entry: scripts/ci/static_checks/www_lint.sh
         pass_filenames: false
         ## ADD MOST PRE-COMMITS ABOVE THAT LINE
         # The below pre-commits are those requiring CI image to be built
@@ -623,6 +632,12 @@ repos:
         language: system
         always_run: true
         pass_filenames: false
+      - id: bats-in-container-tests
+        name: Run in container bats tests
+        language: system
+        entry: ./scripts/ci/pre_commit/pre_commit_in_container_bats_test.sh
+        files: ^tests/bats/in_container/.*\.bats$|^scripts/in_container/.*sh
+        pass_filenames: false
       - id: mypy
         name: Run mypy
         language: system
@@ -642,28 +657,6 @@ repos:
         files: ^docs/.*\.py$
         exclude: rtd-deprecation
         require_serial: false
-      - id: pylint
-        name: Run pylint for main code
-        language: system
-        entry: ./scripts/ci/pre_commit/pre_commit_pylint.sh
-        files: \.py$
-        exclude: ^scripts/.*\.py$|^dev|^provider_packages|^chart|^tests|^kubernetes_tests|^airflow/_vendor/
-        pass_filenames: true
-        require_serial: true
-      - id: pylint
-        name: Run pylint for tests
-        language: system
-        entry: env PYLINTRC=pylintrc-tests ./scripts/ci/pre_commit/pre_commit_pylint.sh
-        files: ^tests/.*\.py$
-        pass_filenames: true
-        require_serial: true
-      - id: pylint
-        name: Run pylint for helm chart tests
-        language: system
-        entry: env PYLINTRC=pylintrc-tests ./scripts/ci/pre_commit/pre_commit_pylint.sh
-        files: ^chart/.*\.py$
-        pass_filenames: true
-        require_serial: true
       - id: flake8
         name: Run flake8
         language: system
diff --git a/.rat-excludes b/.rat-excludes
index 145bdf6..70bd984 100644
--- a/.rat-excludes
+++ b/.rat-excludes
@@ -83,9 +83,6 @@ PROVIDER_CHANGES*.md
 manifests/*
 redirects.txt
 
-# Temporary list of files to make compatible with Pylint
-pylint_todo.txt
-
 # Locally mounted files
 .*egg-info/*
 .bash_history
diff --git a/BREEZE.rst b/BREEZE.rst
index 549addd..a5b3733 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -2239,22 +2239,22 @@ This is the current syntax for  `./breeze <./breeze>`_:
         Run selected static checks for currently changed files. You should specify static check that
         you would like to run or 'all' to run all checks. One of:
 
-                 all all-but-pylint airflow-config-yaml airflow-providers-available
-                 airflow-provider-yaml-files-ok base-operator bats-tests bats-in-container-tests
-                 black build build-providers-dependencies check-apache-license check-builtin-literals
+                 all airflow-config-yaml airflow-providers-available airflow-provider-yaml-files-ok
+                 base-operator bats-tests bats-in-container-tests black blacken-docs boring-cyborg
+                 build build-providers-dependencies check-apache-license check-builtin-literals
                  check-executables-have-shebangs check-hooks-apply check-integrations
-                 check-merge-conflict check-xml consistent-pylint daysago-import-check
-                 debug-statements detect-private-key doctoc dont-use-safe-filter end-of-file-fixer
-                 fix-encoding-pragma flake8 flynt forbid-tabs helm-lint identity
-                 incorrect-use-of-LoggingMixin insert-license isort json-schema language-matters
-                 lint-dockerfile lint-openapi markdownlint mermaid mixed-line-ending mypy mypy-helm
-                 no-providers-in-core-examples no-relative-imports pre-commit-descriptions
-                 pre-commit-hook-names provide-create-sessions providers-changelogs
-                 providers-init-file provider-yamls pydevd pydocstyle pylint pylint-tests
-                 python-no-log-warn pyupgrade restrict-start_date rst-backticks setup-order
-                 setup-extra-packages shellcheck sort-in-the-wild sort-spelling-wordlist stylelint
-                 trailing-whitespace ui-lint update-breeze-file update-extras update-local-yml-file
-                 update-setup-cfg-file verify-db-migrations-documented version-sync yamllint
+                 check-merge-conflict check-xml daysago-import-check debug-statements
+                 detect-private-key doctoc dont-use-safe-filter end-of-file-fixer fix-encoding-pragma
+                 flake8 flynt forbid-tabs helm-lint identity incorrect-use-of-LoggingMixin
+                 insert-license isort json-schema language-matters lint-dockerfile lint-openapi
+                 markdownlint mermaid mixed-line-ending mypy mypy-helm no-providers-in-core-examples
+                 no-relative-imports pre-commit-descriptions pre-commit-hook-names pretty-format-json
+                 provide-create-sessions providers-changelogs providers-init-file provider-yamls
+                 pydevd pydocstyle python-no-log-warn pyupgrade restrict-start_date rst-backticks
+                 setup-order setup-extra-packages shellcheck sort-in-the-wild sort-spelling-wordlist
+                 stylelint trailing-whitespace ui-lint update-breeze-file update-extras
+                 update-local-yml-file update-setup-cfg-file verify-db-migrations-documented
+                 version-sync www-lint yamllint yesqa
 
         You can pass extra arguments including options to the pre-commit framework as
         <EXTRA_ARGS> passed after --. For example:
diff --git a/CI.rst b/CI.rst
index 260d570..1ab37d1 100644
--- a/CI.rst
+++ b/CI.rst
@@ -669,9 +669,7 @@ This workflow is a regular workflow that performs all checks of Airflow code.
 +---------------------------+----------------------------------------------+-------+-------+------+
 | CI Images                 | Waits for CI Images (3)                      | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
-| Static checks             | Performs static checks without pylint        | Yes   | Yes   | Yes  |
-+---------------------------+----------------------------------------------+-------+-------+------+
-| Static checks: pylint     | Performs pylint static checks                | Yes   | Yes   | Yes  |
+| Static checks             | Performs static checks                       | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
 | Build docs                | Builds documentation                         | Yes   | Yes   | Yes  |
 +---------------------------+----------------------------------------------+-------+-------+------+
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 21cd85e..3cb96ee 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -925,7 +925,7 @@ as described in the static code checks documentation.
 Coding style and best practices
 ===============================
 
-Most of our coding style rules are enforced programmatically by flake8 and pylint (which are run automatically
+Most of our coding style rules are enforced programmatically by flake8 and mypy (which are run automatically
 on every pull request), but there are some rules that are not yet automated and are more Airflow specific or
 semantic than style
 
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index d487b2d..40fd98e 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -614,8 +614,7 @@ All Tests are inside ./tests directory.
       entrypoint_exec.sh*                         run_install_and_test_provider_packages.sh*
       _in_container_script_init.sh*               run_mypy.sh*
       prod/                                       run_prepare_provider_packages.sh*
-      refresh_pylint_todo.sh*                     run_prepare_provider_documentation.sh*
-      run_ci_tests.sh*                            run_pylint.sh*
+      run_ci_tests.sh*                            run_prepare_provider_documentation.sh*
       run_clear_tmp.sh*                           run_system_tests.sh*
       run_docs_build.sh*                          run_tmux_welcome.sh*
       run_extract_tests.sh*                       stop_tmux_airflow.sh*
@@ -812,8 +811,7 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r
       entrypoint_exec.sh*                         run_install_and_test_provider_packages.sh*
       _in_container_script_init.sh*               run_mypy.sh*
       prod/                                       run_prepare_provider_packages.sh*
-      refresh_pylint_todo.sh*                     run_prepare_provider_documentation.sh*
-      run_ci_tests.sh*                            run_pylint.sh*
+      run_ci_tests.sh*                            run_prepare_provider_documentation.sh*
       run_clear_tmp.sh*                           run_system_tests.sh*
       run_docs_build.sh*                          run_tmux_welcome.sh*
       run_extract_tests.sh*                       stop_tmux_airflow.sh*
@@ -857,14 +855,6 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r
    <a href="https://github.com/apache/airflow/blob/main/STATIC_CODE_CHECKS.rst#pre-commit-hooks" target="_blank">
    Pre-commit Hooks</a>
 
-  - |Pylint Static Code Checks|
-
-  .. |Pylint Static Code Checks| raw:: html
-
-   <a href="https://github.com/apache/airflow/blob/main/STATIC_CODE_CHECKS.rst#pylint-static-code-checks"
-   target="_blank">Pylint Static Code Checks</a>
-
-
   - |Running Static Code Checks via Breeze|
 
   .. |Running Static Code Checks via Breeze| raw:: html
diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst
index 3e3cce6..0f48967 100644
--- a/PULL_REQUEST_WORKFLOW.rst
+++ b/PULL_REQUEST_WORKFLOW.rst
@@ -109,7 +109,7 @@ We have the following test types (separated by packages in which they are):
 
 We also have several special kinds of tests that are not separated by packages but they are marked with
 pytest markers. They can be found in any of those packages and they can be selected by the appropriate
-pylint custom command line options. See `TESTING.rst <TESTING.rst>`_ for details but those are:
+pytest custom command line options. See `TESTING.rst <TESTING.rst>`_ for details but those are:
 
 * Integration - tests that require external integration images running in docker-compose
 * Quarantined - tests that are flaky and need to be fixed
@@ -175,11 +175,11 @@ The logic implemented for the changes works as follows:
     Quarantined tests are described in `TESTING.rst <TESTING.rst>`_
 
 11) There is a special case of static checks. In case the above logic determines that the CI image
-    needs to be build, we run long and more comprehensive version of static checks - including Pylint,
+    needs to be built, we run a long and more comprehensive version of static checks - including
     Mypy, Flake8. And those tests are run on all files, no matter how many files changed.
     In case the image is not built, we run only simpler set of changes - the longer static checks
     that require CI image are skipped, and we only run the tests on the files that changed in the incoming
-    commit - unlike pylint/flake8/mypy, those static checks are per-file based and they should not miss any
+    commit - unlike flake8/mypy, those static checks are per-file based and they should not miss any
     important change.
 
 Similarly to selective tests we also run selective security scans. In Pull requests,
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 2c743d4..5ac8f70 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -60,7 +60,11 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``black``                             Runs Black (the uncompromising Python code formatter)
 ----------------------------------- ---------------------------------------------------------------- ------------
-``build``                             Builds image for mypy, pylint, flake8                                *
+``blacken-docs``                      Runs black on Python code blocks in documentation files
+----------------------------------- ---------------------------------------------------------------- ------------
+``boring-cyborg``                     Checks for Boring Cyborg configuration consistency
+----------------------------------- ---------------------------------------------------------------- ------------
+``build``                             Builds image for mypy, flake8                                        *
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``build-providers-dependencies``      Regenerates the JSON file with cross-provider dependencies
 ----------------------------------- ---------------------------------------------------------------- ------------
@@ -78,8 +82,6 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``check-xml``                         Checks XML files with xmllint
 ----------------------------------- ---------------------------------------------------------------- ------------
-``consistent-pylint``                 Consistent usage of pylint enable/disable with space
------------------------------------ ---------------------------------------------------------------- ------------
 ``daysago-import-check``              Checks if daysago is properly imported
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``debug-statements``                  Detects accidentally committed debug statements
@@ -150,8 +152,6 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``pydocstyle``                        Runs pydocstyle
 ----------------------------------- ---------------------------------------------------------------- ------------
-``pylint``                            Runs pylint check                                                    *
------------------------------------ ---------------------------------------------------------------- ------------
 ``python-no-log-warn``                Checks if there are no deprecate log warn
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``restrict-start_date``               'start_date' should not be in default_args in example_dags
@@ -188,6 +188,8 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``www-lint``                          Static checks of js in airflow/www/static/js/ folder
 ----------------------------------- ---------------------------------------------------------------- ------------
+``yesqa``                             Removes unnecessary noqa statements
+----------------------------------- ---------------------------------------------------------------- ------------
 ``yamllint``                          Checks YAML files with yamllint
 =================================== ================================================================ ============
 
@@ -288,7 +290,7 @@ code. But you can run pre-commit hooks manually as needed.
 
 .. code-block:: bash
 
-    SKIP=pylint,mypy pre-commit run --all-files
+    SKIP=mypy,flake8,build pre-commit run --all-files
 
 
 You can always skip running the tests by providing ``--no-verify`` flag to the
@@ -296,67 +298,6 @@ You can always skip running the tests by providing ``--no-verify`` flag to the
 
 To check other usage types of the pre-commit framework, see `Pre-commit website <https://pre-commit.com/>`__.
 
-Pylint Static Code Checks
--------------------------
-
-We are in the process of fixing the code flagged with pylint checks for the whole Airflow project.
-This is a huge task so we implemented an incremental approach for the process.
-Currently most of the code is excluded from pylint checks via scripts/ci/pylint_todo.txt.
-We have an open JIRA issue AIRFLOW-4364 which has a number of sub-tasks for each of
-the modules that should be made compatible. Fixing problems identified with pylint is one of
-straightforward and easy tasks to do (but time-consuming), so if you are a first-time
-contributor to Airflow, you can choose one of the sub-tasks as your first issue to fix.
-
-To fix a pylint issue, do the following:
-
-1.  Remove module/modules from the
-    `scripts/ci/static_checks/pylint_todo.txt <scripts/ci/pylint_todo.txt>`__.
-
-2.  Run `<scripts/ci/static_checks/pylint.sh>`__.
-
-3.  Fix all the issues reported by pylint.
-
-4.  Re-run `<scripts/ci/static_checks/pylint.sh>`__.
-
-5.  If you see "success", submit a PR following
-    `Pull Request guidelines <#pull-request-guidelines>`__.
-
-
-These are guidelines for fixing errors reported by pylint:
-
--   Fix the errors rather than disable pylint checks. Often you can easily
-    refactor the code (IntelliJ/PyCharm might be helpful when extracting methods
-    in complex code or moving methods around).
-
--   If disabling a particular problem, make sure to disable only that error by
-    using the symbolic name of the error as reported by pylint.
-
-.. code-block:: python
-
-    import airflow.*  # pylint: disable=wildcard-import
-
-
--   If there is a single line where you need to disable a particular error,
-    consider adding a comment to the line that causes the problem. For example:
-
-.. code-block:: python
-
-    def  MakeSummary(pcoll, metric_fn, metric_keys): # pylint: disable=invalid-name
-
-
--   For multiple lines/block of code, to disable an error, you can surround the
-    block with ``pylint:disable/pylint:enable`` comment lines. For example:
-
-.. code-block:: python
-
-    # pylint: disable=too-few-public-methods
-    class  LoginForm(Form):
-        """Form for the user"""
-        username = StringField('Username', [InputRequired()])
-        password = PasswordField('Password', [InputRequired()])
-    # pylint: enable=too-few-public-methods
-
-
 Running Static Code Checks via Breeze
 -------------------------------------
 
@@ -364,14 +305,8 @@ The static code checks can be launched using the Breeze environment.
 
 You run the static code checks via ``./breeze static-check`` or commands.
 
-Note that it may take a lot of time to run checks for all files with pylint on macOS due to a slow
-filesystem for macOS Docker. As a workaround, you can add their arguments after ``--`` as extra arguments.
-For example ``--files`` flag. By default those checks are run only on the files you've changed in your
-commit, but you can also add ``-- --all-files`` flag to run check on all files.
-
 You can see the list of available static checks either via ``--help`` flag or by using the autocomplete
-option. Note that the ``all`` static check runs all configured static checks. Also since pylint tests take
-a lot of time, you can run a special ``all-but-pylint`` check that skips pylint checks.
+option. Note that the ``all`` static check runs all configured static checks.
 
 Run the ``mypy`` check for the currently staged changes:
 
@@ -409,30 +344,11 @@ Run all tests for all files:
 
      ./breeze static-check all -- --all-files
 
-Run all tests but pylint for all files:
-
-.. code-block:: bash
-
-     ./breeze static-check all-but-pylint --all-files
-
-Run pylint checks for all changed files:
-
-.. code-block:: bash
-
-     ./breeze static-check pylint
-
-Run pylint checks for selected files:
-
-.. code-block:: bash
-
-     ./breeze static-check pylint -- --files airflow/configuration.py
-
-
-Run pylint checks for all files:
+Run all tests for the last commit:
 
 .. code-block:: bash
 
-     ./breeze static-check pylint -- --all-files
+     ./breeze static-check all -- --from-ref HEAD^ --to-ref HEAD
 
 
 The ``license`` check is run via a separate script and a separate Docker image containing the
@@ -454,7 +370,6 @@ this, run the following scripts:
 * `<scripts/ci/static_checks/flake8.sh>`_ - runs Flake8 source code style enforcement tool.
 * `<scripts/ci/static_checks/lint_dockerfile.sh>`_ - runs lint checker for the dockerfiles.
 * `<scripts/ci/static_checks/mypy.sh>`_ - runs a check for Mypy type annotation consistency.
-* `<scripts/ci/static_checks/pylint.sh>`_ - runs Pylint static code checker.
 
 The scripts may ask you to rebuild the images, if needed.
 
@@ -471,7 +386,6 @@ If you are already in the Breeze Docker environment (by running the ``./breeze``
 you can also run the same static checks via run_scripts:
 
 * Mypy: ``./scripts/in_container/run_mypy.sh airflow tests``
-* Pylint: ``./scripts/in_container/run_pylint.sh``
 * Flake8: ``./scripts/in_container/run_flake8.sh``
 * License check: ``./scripts/in_container/run_check_licence.sh``
 * Documentation: ``./scripts/in_container/run_docs_build.sh``
@@ -486,20 +400,20 @@ In the Docker container:
 
 .. code-block::
 
-  ./scripts/in_container/run_pylint.sh ./airflow/example_dags/
+  ./scripts/in_container/run_mypy.sh ./airflow/example_dags/
 
 or
 
 .. code-block::
 
-  ./scripts/in_container/run_pylint.sh ./airflow/example_dags/test_utils.py
+  ./scripts/in_container/run_mypy.sh ./airflow/example_dags/test_utils.py
 
 On the host:
 
 .. code-block::
 
-  ./scripts/ci/static_checks/pylint.sh ./airflow/example_dags/
+  ./scripts/ci/static_checks/mypy.sh ./airflow/example_dags/
 
 .. code-block::
 
-  ./scripts/ci/static_checks/pylint.sh ./airflow/example_dags/test_utils.py
+  ./scripts/ci/static_checks/mypy.sh ./airflow/example_dags/test_utils.py
diff --git a/airflow/__init__.py b/airflow/__init__.py
index 6d04886..a448491 100644
--- a/airflow/__init__.py
+++ b/airflow/__init__.py
@@ -27,7 +27,7 @@ isort:skip_file
 """
 
 # flake8: noqa: F401
-# pylint: disable=wrong-import-position
+
 import sys
 from typing import Callable, Optional
 
@@ -56,11 +56,11 @@ PY39 = sys.version_info >= (3, 9)
 def __getattr__(name):
     # PEP-562: Lazy loaded attributes on python modules
     if name == "DAG":
-        from airflow.models.dag import DAG  # pylint: disable=redefined-outer-name
+        from airflow.models.dag import DAG
 
         return DAG
     if name == "AirflowException":
-        from airflow.exceptions import AirflowException  # pylint: disable=redefined-outer-name
+        from airflow.exceptions import AirflowException
 
         return AirflowException
     raise AttributeError(f"module {__name__} has no attribute {name}")
@@ -77,8 +77,8 @@ if not settings.LAZY_LOAD_PROVIDERS:
     providers_manager.ProvidersManager().initialize_providers_manager()
 
 
-# This is never executed, but tricks static analyzers (PyDev, PyCharm,
-# pylint, etc.) into knowing the types of these symbols, and what
+# This is never executed, but tricks static analyzers (PyDev, PyCharm, etc.)
+# into knowing the types of these symbols, and what
 # they contain.
 STATICA_HACK = True
 globals()['kcah_acitats'[::-1].upper()] = False
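
The hunk above keeps the PEP 562 module-level ``__getattr__`` hook that defers
heavy imports until an attribute is first accessed. A minimal sketch of that
lazy-loading pattern (the package and attribute names below are hypothetical):

.. code-block:: python

    import importlib

    # Hypothetical map of lazily exposed attributes to their home modules.
    _LAZY_ATTRIBUTES = {
        "DAG": "mypackage.models.dag",
        "AirflowException": "mypackage.exceptions",
    }


    def __getattr__(name):
        # Called only when `name` is missing from the module namespace
        # (PEP 562, Python 3.7+), so the import cost is paid on first use.
        try:
            module = importlib.import_module(_LAZY_ATTRIBUTES[name])
        except KeyError:
            raise AttributeError(f"module {__name__} has no attribute {name}") from None
        return getattr(module, name)
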
diff --git a/airflow/api/auth/backend/basic_auth.py b/airflow/api/auth/backend/basic_auth.py
index 623beaf..071e7fb 100644
--- a/airflow/api/auth/backend/basic_auth.py
+++ b/airflow/api/auth/backend/basic_auth.py
@@ -30,7 +30,7 @@ def init_app(_):
     """Initializes authentication backend"""
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def auth_current_user() -> Optional[User]:
diff --git a/airflow/api/auth/backend/default.py b/airflow/api/auth/backend/default.py
index d699c91..6b0a1a6 100644
--- a/airflow/api/auth/backend/default.py
+++ b/airflow/api/auth/backend/default.py
@@ -26,7 +26,7 @@ def init_app(_):
     """Initializes authentication backend"""
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def requires_authentication(function: T):
diff --git a/airflow/api/auth/backend/deny_all.py b/airflow/api/auth/backend/deny_all.py
index 9ef05d0..614e263 100644
--- a/airflow/api/auth/backend/deny_all.py
+++ b/airflow/api/auth/backend/deny_all.py
@@ -28,14 +28,14 @@ def init_app(_):
     """Initializes authentication"""
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def requires_authentication(function: T):
     """Decorator for functions that require authentication"""
 
     @wraps(function)
-    def decorated(*args, **kwargs):  # pylint: disable=unused-argument
+    def decorated(*args, **kwargs):
         return Response("Forbidden", 403)
 
     return cast(T, decorated)
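
The ``TypeVar("T", bound=Callable)`` annotations retained in these auth
backends type the decorators so that wrapping does not erase the callable's
signature for mypy. A self-contained sketch of the pattern (the actual
authentication check is elided):

.. code-block:: python

    from functools import wraps
    from typing import Callable, TypeVar, cast

    T = TypeVar("T", bound=Callable)


    def requires_authentication(function: T) -> T:
        """Decorate a view so type checkers still see the original signature."""

        @wraps(function)
        def decorated(*args, **kwargs):
            # An authentication check would run here before delegating.
            return function(*args, **kwargs)

        # cast() tells mypy the wrapper keeps the wrapped callable's type.
        return cast(T, decorated)
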
diff --git a/airflow/api/auth/backend/kerberos_auth.py b/airflow/api/auth/backend/kerberos_auth.py
index e57d477..fb76e8a 100644
--- a/airflow/api/auth/backend/kerberos_auth.py
+++ b/airflow/api/auth/backend/kerberos_auth.py
@@ -54,11 +54,11 @@ from airflow.configuration import conf
 
 log = logging.getLogger(__name__)
 
-# pylint: disable=c-extension-no-member
+
 CLIENT_AUTH: Optional[Union[Tuple[str, str], Any]] = HTTPKerberosAuth(service='airflow')
 
 
-class KerberosService:  # pylint: disable=too-few-public-methods
+class KerberosService:
     """Class to keep information about the Kerberos Service initialized"""
 
     def __init__(self):
@@ -126,7 +126,7 @@ def _gssapi_authenticate(token):
             kerberos.authGSSServerClean(state)
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def requires_authentication(function: T):
diff --git a/airflow/api/client/json_client.py b/airflow/api/client/json_client.py
index 1ffe7fd..2d5fd02 100644
--- a/airflow/api/client/json_client.py
+++ b/airflow/api/client/json_client.py
@@ -31,12 +31,12 @@ class Client(api_client.Client):
         }
         if json is not None:
             params['json'] = json
-        resp = getattr(self._session, method.lower())(**params)  # pylint: disable=not-callable
+        resp = getattr(self._session, method.lower())(**params)
         if not resp.ok:
             # It is justified here because there might be many resp types.
             try:
                 data = resp.json()
-            except Exception:  # noqa pylint: disable=broad-except
+            except Exception:
                 data = {}
             raise OSError(data.get('error', 'Server error'))
 
diff --git a/airflow/api/common/experimental/delete_dag.py b/airflow/api/common/experimental/delete_dag.py
index d27c21f..4462070 100644
--- a/airflow/api/common/experimental/delete_dag.py
+++ b/airflow/api/common/experimental/delete_dag.py
@@ -51,7 +51,7 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session=None) -> i
 
     count = 0
 
-    for model in models.base.Base._decl_class_registry.values():  # noqa pylint: disable=protected-access
+    for model in models.base.Base._decl_class_registry.values():
         if hasattr(model, "dag_id"):
             if keep_records_in_log and model.__name__ == 'Log':
                 continue
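
The loop above walks every mapped model in the declarative registry looking
for a ``dag_id`` column. A hedged sketch of the same idea against the public
SQLAlchemy 1.4+ API (``Base`` and the attribute name are placeholders; older
code reached into the private ``_decl_class_registry`` instead):

.. code-block:: python

    from sqlalchemy.orm import declarative_base

    Base = declarative_base()


    def models_with_attribute(attr_name: str):
        # registry.mappers is the public replacement for _decl_class_registry.
        return [m.class_ for m in Base.registry.mappers if hasattr(m.class_, attr_name)]
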
diff --git a/airflow/api/common/experimental/mark_tasks.py b/airflow/api/common/experimental/mark_tasks.py
index 7612270..6adfd11 100644
--- a/airflow/api/common/experimental/mark_tasks.py
+++ b/airflow/api/common/experimental/mark_tasks.py
@@ -70,7 +70,7 @@ def set_state(
     state: str = State.SUCCESS,
     commit: bool = False,
     session=None,
-):  # pylint: disable=too-many-arguments,too-many-locals
+):
     """
     Set the state of a task instance and if needed its relatives. Can set state
     for future tasks (calculated from execution_date) and retroactively
@@ -134,14 +134,13 @@ def set_state(
     return tis_altered
 
 
-# Flake and pylint disagree about correct indents here
-def all_subdag_tasks_query(sub_dag_run_ids, session, state, confirmed_dates):  # noqa: E123
+def all_subdag_tasks_query(sub_dag_run_ids, session, state, confirmed_dates):
     """Get *all* tasks of the sub dags"""
     qry_sub_dag = (
         session.query(TaskInstance)
         .filter(TaskInstance.dag_id.in_(sub_dag_run_ids), TaskInstance.execution_date.in_(confirmed_dates))
         .filter(or_(TaskInstance.state.is_(None), TaskInstance.state != state))
-    )  # noqa: E123
+    )
     return qry_sub_dag
 
 
@@ -152,7 +151,7 @@ def get_all_dag_task_query(dag, session, state, task_ids, confirmed_dates):
         .filter(
             TaskInstance.dag_id == dag.dag_id,
             TaskInstance.execution_date.in_(confirmed_dates),
-            TaskInstance.task_id.in_(task_ids),  # noqa: E123
+            TaskInstance.task_id.in_(task_ids),
         )
         .filter(or_(TaskInstance.state.is_(None), TaskInstance.state != state))
     )
diff --git a/airflow/api/common/experimental/trigger_dag.py b/airflow/api/common/experimental/trigger_dag.py
index 519079e..f82f88f 100644
--- a/airflow/api/common/experimental/trigger_dag.py
+++ b/airflow/api/common/experimental/trigger_dag.py
@@ -34,7 +34,7 @@ def _trigger_dag(
     conf: Optional[Union[dict, str]] = None,
     execution_date: Optional[datetime] = None,
     replace_microseconds: bool = True,
-) -> List[DagRun]:  # pylint: disable=too-many-arguments
+) -> List[DagRun]:
     """Triggers DAG run.
 
     :param dag_id: DAG ID
diff --git a/airflow/api_connexion/endpoints/dag_run_endpoint.py b/airflow/api_connexion/endpoints/dag_run_endpoint.py
index 2f65f06..27d13b6 100644
--- a/airflow/api_connexion/endpoints/dag_run_endpoint.py
+++ b/airflow/api_connexion/endpoints/dag_run_endpoint.py
@@ -96,7 +96,7 @@ def get_dag_runs(
     offset=None,
     limit=None,
     order_by='id',
-):  # pylint: disable=too-many-arguments
+):
     """Get all DAG Runs."""
     query = session.query(DagRun)
 
@@ -134,7 +134,7 @@ def _fetch_dag_runs(
     limit,
     offset,
     order_by,
-):  # pylint: disable=too-many-arguments
+):
     query = _apply_date_filters_to_query(
         query,
         end_date_gte,
diff --git a/airflow/api_connexion/endpoints/health_endpoint.py b/airflow/api_connexion/endpoints/health_endpoint.py
index ea0d62d..ce7666b 100644
--- a/airflow/api_connexion/endpoints/health_endpoint.py
+++ b/airflow/api_connexion/endpoints/health_endpoint.py
@@ -33,7 +33,7 @@ def get_health():
             latest_scheduler_heartbeat = scheduler_job.latest_heartbeat.isoformat()
             if scheduler_job.is_alive():
                 scheduler_status = HEALTHY
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         metadatabase_status = UNHEALTHY
 
     payload = {
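
The broad ``except Exception`` kept above is deliberate: a health probe must
downgrade the reported status rather than raise. A minimal sketch of that
shape (the probe callable passed in is hypothetical):

.. code-block:: python

    HEALTHY, UNHEALTHY = "healthy", "unhealthy"


    def get_health(scheduler_is_alive):
        metadatabase_status = HEALTHY
        scheduler_status = UNHEALTHY
        try:
            if scheduler_is_alive():  # hypothetical heartbeat probe
                scheduler_status = HEALTHY
        except Exception:
            # Any failure while probing is reported as a status, never raised.
            metadatabase_status = UNHEALTHY
        return {
            "metadatabase": {"status": metadatabase_status},
            "scheduler": {"status": scheduler_status},
        }
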
diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py
index 418bded..51c48bd 100644
--- a/airflow/api_connexion/endpoints/task_instance_endpoint.py
+++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py
@@ -126,7 +126,7 @@ def get_task_instances(
     queue: Optional[List[str]] = None,
     offset: Optional[int] = None,
     session=None,
-):  # pylint: disable=too-many-arguments
+):
     """Get list of task instances."""
     base_query = session.query(TI)
 
diff --git a/airflow/api_connexion/parameters.py b/airflow/api_connexion/parameters.py
index 8e06301..649a0f4 100644
--- a/airflow/api_connexion/parameters.py
+++ b/airflow/api_connexion/parameters.py
@@ -64,7 +64,7 @@ def check_limit(value: int):
     return value
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def format_parameters(params_formatters: Dict[str, Callable[..., bool]]) -> Callable[[T], T]:
diff --git a/airflow/api_connexion/schemas/common_schema.py b/airflow/api_connexion/schemas/common_schema.py
index f8d6e48..22b1812 100644
--- a/airflow/api_connexion/schemas/common_schema.py
+++ b/airflow/api_connexion/schemas/common_schema.py
@@ -155,12 +155,12 @@ class ClassReferenceSchema(Schema):
 
     def _get_module(self, obj):
         if isinstance(obj, SerializedBaseOperator):
-            return obj._task_module  # pylint: disable=protected-access
+            return obj._task_module
         return inspect.getmodule(obj).__name__
 
     def _get_class_name(self, obj):
         if isinstance(obj, SerializedBaseOperator):
-            return obj._task_type  # pylint: disable=protected-access
+            return obj._task_type
         if isinstance(obj, type):
             return obj.__name__
         return type(obj).__name__
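
When the object is not a serialized operator, the schema methods above fall
back to plain introspection. A small sketch of that fallback (the
serialized-operator branch is omitted):

.. code-block:: python

    import inspect


    def get_module_name(obj):
        module = inspect.getmodule(obj)
        return module.__name__ if module else None


    def get_class_name(obj):
        # Classes report their own name; instances report their type's name.
        return obj.__name__ if isinstance(obj, type) else type(obj).__name__
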
diff --git a/airflow/api_connexion/schemas/connection_schema.py b/airflow/api_connexion/schemas/connection_schema.py
index 44e3224..c3b0388 100644
--- a/airflow/api_connexion/schemas/connection_schema.py
+++ b/airflow/api_connexion/schemas/connection_schema.py
@@ -39,7 +39,7 @@ class ConnectionCollectionItemSchema(SQLAlchemySchema):
     port = auto_field()
 
 
-class ConnectionSchema(ConnectionCollectionItemSchema):  # pylint: disable=too-many-ancestors
+class ConnectionSchema(ConnectionCollectionItemSchema):
     """Connection schema"""
 
     password = auto_field(load_only=True)
diff --git a/airflow/api_connexion/schemas/error_schema.py b/airflow/api_connexion/schemas/error_schema.py
index 5ae228f..c9462b5 100644
--- a/airflow/api_connexion/schemas/error_schema.py
+++ b/airflow/api_connexion/schemas/error_schema.py
@@ -19,7 +19,7 @@ from typing import List, NamedTuple
 from marshmallow import Schema, fields
 from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
 
-from airflow.models.errors import ImportError  # pylint: disable=redefined-builtin
+from airflow.models.errors import ImportError
 
 
 class ImportErrorSchema(SQLAlchemySchema):
diff --git a/airflow/api_connexion/security.py b/airflow/api_connexion/security.py
index 4faa9ed..df71856 100644
--- a/airflow/api_connexion/security.py
+++ b/airflow/api_connexion/security.py
@@ -22,7 +22,7 @@ from flask import Response, current_app
 
 from airflow.api_connexion.exceptions import PermissionDenied, Unauthenticated
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def check_authentication() -> None:
diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py
index e1a5826..1c16c9f 100644
--- a/airflow/cli/cli_parser.py
+++ b/airflow/cli/cli_parser.py
@@ -63,7 +63,7 @@ class DefaultHelpParser(argparse.ArgumentParser):
             raise ArgumentError(action, message)
         if value == 'kubernetes':
             try:
-                import kubernetes.client  # noqa: F401 pylint: disable=unused-import
+                import kubernetes.client  # noqa: F401
             except ImportError:
                 message = (
                     'The kubernetes subcommand requires that you pip install the kubernetes python client.'
@@ -89,7 +89,6 @@ _UNSET = object()
 class Arg:
     """Class to keep information about command line argument"""
 
-    # pylint: disable=redefined-builtin,unused-argument,too-many-arguments
     def __init__(
         self,
         flags=_UNSET,
@@ -113,8 +112,6 @@ class Arg:
 
             self.kwargs[k] = v
 
-    # pylint: enable=redefined-builtin,unused-argument,too-many-arguments
-
     def add_to_parser(self, parser: argparse.ArgumentParser):
         """Add this argument to an ArgumentParser"""
         parser.add_argument(*self.flags, **self.kwargs)
@@ -1641,7 +1638,7 @@ class AirflowHelpFormatter(argparse.HelpFormatter):
     """
 
     def _format_action(self, action: Action):
-        if isinstance(action, argparse._SubParsersAction):  # pylint: disable=protected-access
+        if isinstance(action, argparse._SubParsersAction):
 
             parts = []
             action_header = self._format_action_invocation(action)
@@ -1649,7 +1646,7 @@ class AirflowHelpFormatter(argparse.HelpFormatter):
             parts.append(action_header)
 
             self._indent()
-            subactions = action._get_subactions()  # pylint: disable=protected-access
+            subactions = action._get_subactions()
             action_subcommands, group_subcommands = partition(
                 lambda d: isinstance(ALL_COMMANDS_DICT[d.dest], GroupCommand), subactions
             )
@@ -1703,9 +1700,7 @@ def _sort_args(args: Iterable[Arg]) -> Iterable[Arg]:
     yield from sorted(optional, key=lambda x: get_long_option(x).lower())
 
 
-def _add_command(
-    subparsers: argparse._SubParsersAction, sub: CLICommand  # pylint: disable=protected-access
-) -> None:
+def _add_command(subparsers: argparse._SubParsersAction, sub: CLICommand) -> None:
     sub_proc = subparsers.add_parser(
         sub.name, help=sub.help, description=sub.description or sub.help, epilog=sub.epilog
     )
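
_add_command and AirflowHelpFormatter above work against argparse internals: add_subparsers() returns an argparse._SubParsersAction, and each add_parser() call yields a child parser. Only pylint objected to the underscored names, hence the removed disables. A self-contained sketch of those mechanics (command names are illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog="demo")
    subparsers = parser.add_subparsers(dest="subcommand", metavar="COMMAND")
    # type(subparsers) is argparse._SubParsersAction -- private, but stable in practice

    greet = subparsers.add_parser("greet", help="print a greeting")
    greet.add_argument("name")

    args = parser.parse_args(["greet", "world"])
    print(args.subcommand, args.name)  # -> greet world
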
diff --git a/airflow/cli/commands/celery_command.py b/airflow/cli/commands/celery_command.py
index e01f177..ba3c45e 100644
--- a/airflow/cli/commands/celery_command.py
+++ b/airflow/cli/commands/celery_command.py
@@ -180,7 +180,7 @@ def worker(args):
 
 
 @cli_utils.action_logging
-def stop_worker(args):  # pylint: disable=unused-argument
+def stop_worker(args):
     """Sends SIGTERM to Celery worker"""
     # Read PID from file
     pid_file_path, _, _, _ = setup_locations(process=WORKER_PROCESS_NAME)
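
stop_worker goes on to read the worker PID from that file and signal the process. A minimal sketch of the read-pid-then-SIGTERM pattern (path handling simplified for illustration):

    import os
    import signal

    def stop_worker(pid_file_path: str) -> None:
        """Send SIGTERM to the process whose PID is recorded in pid_file_path."""
        with open(pid_file_path) as pid_file:
            pid = int(pid_file.read().strip())
        os.kill(pid, signal.SIGTERM)
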
diff --git a/airflow/cli/commands/info_command.py b/airflow/cli/commands/info_command.py
index 9eb3e4b..590a96c 100644
--- a/airflow/cli/commands/info_command.py
+++ b/airflow/cli/commands/info_command.py
@@ -214,7 +214,7 @@ class AirflowInfo:
         try:
             handler_names = [get_fullname(handler) for handler in logging.getLogger('airflow.task').handlers]
             return ", ".join(handler_names)
-        except Exception:  # noqa pylint: disable=broad-except
+        except Exception:
             return "NOT AVAILABLE"
 
     @property
diff --git a/airflow/cli/commands/kubernetes_command.py b/airflow/cli/commands/kubernetes_command.py
index 3aad616..3c3c8e6 100644
--- a/airflow/cli/commands/kubernetes_command.py
+++ b/airflow/cli/commands/kubernetes_command.py
@@ -106,7 +106,7 @@ def cleanup_pods(args):
             ]
         ),
     }
-    while True:  # pylint: disable=too-many-nested-blocks
+    while True:
         pod_list = kube_client.list_namespaced_pod(**list_kwargs)
         for pod in pod_list.items:
             pod_name = pod.metadata.name
@@ -130,7 +130,7 @@ def cleanup_pods(args):
                     print(f"Can't remove POD: {e}", file=sys.stderr)
                 continue
             print(f'No action taken on pod {pod_name}')
-        continue_token = pod_list.metadata._continue  # pylint: disable=protected-access
+        continue_token = pod_list.metadata._continue
         if not continue_token:
             break
         list_kwargs["_continue"] = continue_token
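
The cleanup loop pages through pods using the Kubernetes API's continue token; pod_list.metadata._continue is a private field on the generated list metadata, which is what the removed protected-access disable covered. A minimal sketch of the same pagination pattern, assuming a reachable cluster and the kubernetes client package:

    from kubernetes import client, config

    config.load_kube_config()  # use load_incluster_config() when running in a pod
    kube_client = client.CoreV1Api()

    list_kwargs = {"namespace": "default", "limit": 500}
    while True:
        pod_list = kube_client.list_namespaced_pod(**list_kwargs)
        for pod in pod_list.items:
            print(pod.metadata.name)
        continue_token = pod_list.metadata._continue  # empty/None on the last page
        if not continue_token:
            break
        list_kwargs["_continue"] = continue_token
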
diff --git a/airflow/cli/commands/plugins_command.py b/airflow/cli/commands/plugins_command.py
index 09a78df..2d59e90 100644
--- a/airflow/cli/commands/plugins_command.py
+++ b/airflow/cli/commands/plugins_command.py
@@ -45,7 +45,7 @@ def dump_plugins(args):
         return
 
     # Remove empty info
-    if args.output == "table":  # pylint: disable=too-many-nested-blocks
+    if args.output == "table":
         # We can do plugins_info[0] as the element it will exist as there's
         # at least one plugin at this point
         for col in list(plugins_info[0]):
diff --git a/airflow/cli/commands/pool_command.py b/airflow/cli/commands/pool_command.py
index b116e25..3ea9e65 100644
--- a/airflow/cli/commands/pool_command.py
+++ b/airflow/cli/commands/pool_command.py
@@ -103,7 +103,7 @@ def pool_import_helper(filepath):
 
     with open(filepath) as poolfile:
         data = poolfile.read()
-    try:  # pylint: disable=too-many-nested-blocks
+    try:
         pools_json = json.loads(data)
     except JSONDecodeError as e:
         raise SystemExit("Invalid json file: " + str(e))
diff --git a/airflow/cli/commands/role_command.py b/airflow/cli/commands/role_command.py
index 0d6ab33..454a1b2 100644
--- a/airflow/cli/commands/role_command.py
+++ b/airflow/cli/commands/role_command.py
@@ -27,7 +27,7 @@ from airflow.www.app import cached_app
 @suppress_logs_and_warning
 def roles_list(args):
     """Lists all existing roles"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+    appbuilder = cached_app().appbuilder
     roles = appbuilder.sm.get_all_roles()
     AirflowConsole().print_as(
         data=sorted(r.name for r in roles), output=args.output, mapper=lambda x: {"name": x}
@@ -38,7 +38,7 @@ def roles_list(args):
 @suppress_logs_and_warning
 def roles_create(args):
     """Creates new empty role in DB"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+    appbuilder = cached_app().appbuilder
     for role_name in args.role:
         appbuilder.sm.add_role(role_name)
     print(f"Added {len(args.role)} role(s)")
diff --git a/airflow/cli/commands/sync_perm_command.py b/airflow/cli/commands/sync_perm_command.py
index d957fcb..eba8c2f 100644
--- a/airflow/cli/commands/sync_perm_command.py
+++ b/airflow/cli/commands/sync_perm_command.py
@@ -23,8 +23,8 @@ from airflow.www.app import cached_app
 @cli_utils.action_logging
 def sync_perm(args):
     """Updates permissions for existing roles and DAGs"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
-    print('Updating permission, view-menu for all existing roles')
+    appbuilder = cached_app().appbuilder
+    print('Updating actions and resources for all existing roles')
     # Add missing permissions for all the Base Views _before_ syncing/creating roles
     appbuilder.add_permissions(update_perms=True)
     appbuilder.sm.sync_roles()
diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py
index c0cfb03..0302d4d 100644
--- a/airflow/cli/commands/task_command.py
+++ b/airflow/cli/commands/task_command.py
@@ -146,7 +146,7 @@ def _run_raw_task(args, ti: TaskInstance) -> None:
                 ", ".join(f"--{o}" for o in unsupported_options),
             )
         )
-    ti._run_raw_task(  # pylint: disable=protected-access
+    ti._run_raw_task(
         mark_success=args.mark_success,
         job_id=args.job_id,
         pool=args.pool,
@@ -391,7 +391,7 @@ def task_test(args, dag=None):
             ti.dry_run()
         else:
             ti.run(ignore_task_deps=True, ignore_ti_state=True, test_mode=True)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         if args.post_mortem:
             debugger = _guess_debugger()
             debugger.post_mortem()
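
task_test deliberately catches the broadest Exception so that --post-mortem can drop into a debugger at the failure point. A minimal sketch of that pattern using the standard-library debugger:

    import pdb

    def flaky_task():
        raise RuntimeError("boom")

    try:
        flaky_task()
    except Exception:
        # With no argument, post_mortem() inspects the exception being handled.
        pdb.post_mortem()
        raise
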
diff --git a/airflow/cli/commands/user_command.py b/airflow/cli/commands/user_command.py
index ffaf337..172734d 100644
--- a/airflow/cli/commands/user_command.py
+++ b/airflow/cli/commands/user_command.py
@@ -32,7 +32,7 @@ from airflow.www.app import cached_app
 @suppress_logs_and_warning
 def users_list(args):
     """Lists users at the command line"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+    appbuilder = cached_app().appbuilder
     users = appbuilder.sm.get_all_users()
     fields = ['id', 'username', 'email', 'first_name', 'last_name', 'roles']
 
@@ -44,7 +44,7 @@ def users_list(args):
 @cli_utils.action_logging
 def users_create(args):
     """Creates new user in the DB"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+    appbuilder = cached_app().appbuilder
     role = appbuilder.sm.find_role(args.role)
     if not role:
         valid_roles = appbuilder.sm.get_all_roles()
@@ -73,7 +73,7 @@ def users_create(args):
 @cli_utils.action_logging
 def users_delete(args):
     """Deletes user from DB"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+    appbuilder = cached_app().appbuilder
 
     try:
         user = next(u for u in appbuilder.sm.get_all_users() if u.username == args.username)
@@ -95,10 +95,7 @@ def users_manage_role(args, remove=False):
     if args.username and args.email:
         raise SystemExit('Conflicting args: must supply either --username or --email, but not both')
 
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
-    user = appbuilder.sm.find_user(username=args.username) or appbuilder.sm.find_user(email=args.email)
-    if not user:
-        raise SystemExit(f'User "{args.username or args.email}" does not exist')
+    appbuilder = cached_app().appbuilder
 
     role = appbuilder.sm.find_role(args.role)
     if not role:
@@ -123,7 +120,7 @@ def users_manage_role(args, remove=False):
 
 def users_export(args):
     """Exports all users to the json file"""
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+    appbuilder = cached_app().appbuilder
     users = appbuilder.sm.get_all_users()
     fields = ['id', 'username', 'email', 'first_name', 'last_name', 'roles']
 
@@ -154,7 +151,7 @@ def users_import(args):
     if not os.path.exists(json_file):
         raise SystemExit(f"File '{json_file}' does not exist")
 
-    users_list = None  # pylint: disable=redefined-outer-name
+    users_list = None
     try:
         with open(json_file) as file:
             users_list = json.loads(file.read())
@@ -169,8 +166,8 @@ def users_import(args):
         print("Updated the following users:\n\t{}".format("\n\t".join(users_updated)))
 
 
-def _import_users(users_list):  # pylint: disable=redefined-outer-name
-    appbuilder = cached_app().appbuilder  # pylint: disable=no-member
+def _import_users(users_list):
+    appbuilder = cached_app().appbuilder
     users_created = []
     users_updated = []
 
diff --git a/airflow/cli/commands/variable_command.py b/airflow/cli/commands/variable_command.py
index 07563e1..cacfbdf 100644
--- a/airflow/cli/commands/variable_command.py
+++ b/airflow/cli/commands/variable_command.py
@@ -91,7 +91,7 @@ def _import_helper(filepath):
         for k, v in var_json.items():
             try:
                 Variable.set(k, v, serialize_json=not isinstance(v, str))
-            except Exception as e:  # pylint: disable=broad-except
+            except Exception as e:
                 print(f'Variable import failed: {repr(e)}')
                 fail_count += 1
             else:
@@ -111,7 +111,7 @@ def _variable_export_helper(filepath):
         for var in qry:
             try:
                 val = data.decode(var.val)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 val = var.val
             var_dict[var.key] = val
 
diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py
index e786eb3..db6fbbb 100644
--- a/airflow/cli/commands/webserver_command.py
+++ b/airflow/cli/commands/webserver_command.py
@@ -133,7 +133,7 @@ class GunicornMonitor(LoggingMixin):
         def ready_prefix_on_cmdline(proc):
             try:
                 cmdline = proc.cmdline()
-                if len(cmdline) > 0:  # pylint: disable=len-as-condition
+                if len(cmdline) > 0:
                     return settings.GUNICORN_WORKER_READY_PREFIX in cmdline[0]
             except psutil.NoSuchProcess:
                 pass
@@ -201,7 +201,7 @@ class GunicornMonitor(LoggingMixin):
 
     def start(self) -> NoReturn:
         """Starts monitoring the webserver."""
-        try:  # pylint: disable=too-many-nested-blocks
+        try:
             self._wait_until_true(
                 lambda: self.num_workers_expected == self._get_num_workers_running(),
                 timeout=self.master_timeout,
@@ -427,7 +427,7 @@ def webserver(args):
 
         gunicorn_master_proc = None
 
-        def kill_proc(signum, _):  # pylint: disable=unused-argument
+        def kill_proc(signum, _):
             log.info("Received signal: %s. Closing gunicorn.", signum)
             gunicorn_master_proc.terminate()
             with suppress(TimeoutError):
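
kill_proc is a signal handler, and the signal module always invokes handlers with (signum, frame); the unused frame argument is what the removed unused-argument disable excused. A tiny sketch of registering such a handler:

    import signal
    import sys

    def kill_proc(signum, _):
        print(f"Received signal: {signum}. Shutting down.")
        sys.exit(0)

    signal.signal(signal.SIGTERM, kill_proc)
    signal.signal(signal.SIGINT, kill_proc)
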
diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py
index d17f948..30f29e3 100644
--- a/airflow/cli/simple_table.py
+++ b/airflow/cli/simple_table.py
@@ -73,7 +73,6 @@ class AirflowConsole(Console):
         output = tabulate(rows, tablefmt="plain", headers=data[0].keys())
         print(output)
 
-    # pylint: disable=too-many-return-statements
     def _normalize_data(self, value: Any, output: str) -> Optional[Union[list, str, dict]]:
         if isinstance(value, (tuple, list)):
             if output == "table":
@@ -125,7 +124,7 @@ class SimpleTable(Table):
         self.title_justify = kwargs.get("title_justify", "left")
         self.caption = kwargs.get("caption", " ")
 
-    def add_column(self, *args, **kwargs) -> None:  # pylint: disable=signature-differs
+    def add_column(self, *args, **kwargs) -> None:
         """Add a column to the table. We use different default"""
         kwargs["overflow"] = kwargs.get("overflow")  # to avoid truncating
         super().add_column(*args, **kwargs)
diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py
index 10b4085..e3dea0a 100644
--- a/airflow/compat/functools.py
+++ b/airflow/compat/functools.py
@@ -18,12 +18,12 @@
 import sys
 
 if sys.version_info >= (3, 8):
-    from functools import cached_property  # pylint: disable=no-name-in-module
+    from functools import cached_property
 else:
     from cached_property import cached_property
 
 if sys.version_info >= (3, 9):
-    from functools import cache  # pylint: disable=no-name-in-module
+    from functools import cache
 else:
     from functools import lru_cache
 
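
The shim above gives the rest of the code base one import path regardless of interpreter version: functools.cached_property only exists on 3.8+ and functools.cache only on 3.9+, with the module falling back to the cached_property package and to lru_cache on older Pythons. A sketch of the same version-gating idiom:

    import sys
    from functools import lru_cache

    if sys.version_info >= (3, 9):
        from functools import cache
    else:
        cache = lru_cache(maxsize=None)  # functools.cache is exactly this alias

    @cache
    def expensive() -> int:
        print("computed once")
        return 42
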
diff --git a/airflow/configuration.py b/airflow/configuration.py
index ed38efd..8ea38fe 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -102,7 +102,7 @@ def default_config_yaml() -> List[dict]:
         return yaml.safe_load(config_file)
 
 
-class AirflowConfigParser(ConfigParser):  # pylint: disable=too-many-ancestors
+class AirflowConfigParser(ConfigParser):
     """Custom Airflow Configparser supporting defaults and deprecated options"""
 
     # These configuration elements can be fetched as the stdout of commands
@@ -265,7 +265,7 @@ class AirflowConfigParser(ConfigParser):  # pylint: disable=too-many-ancestors
                     + f"{list_mode}. Possible values are {', '.join(file_parser_modes)}."
                 )
 
-    def _using_old_value(self, old, current_value):  # noqa
+    def _using_old_value(self, old, current_value):
         return old.search(current_value) is not None
 
     def _update_env_var(self, section, name, new_value):
@@ -450,7 +450,7 @@ class AirflowConfigParser(ConfigParser):  # pylint: disable=too-many-ancestors
                 f'Current value: "{val}".'
             )
 
-    def getimport(self, section, key, **kwargs):  # noqa
+    def getimport(self, section, key, **kwargs):
         """
         Reads options, imports the full qualified name, and returns the object.
 
@@ -779,7 +779,7 @@ def parameterized_config(template):
     :param template: a config content templated with {{variables}}
     """
     all_vars = {k: v for d in [globals(), locals()] for k, v in d.items()}
-    return template.format(**all_vars)  # noqa
+    return template.format(**all_vars)
 
 
 def get_airflow_test_config(airflow_home):
@@ -876,7 +876,7 @@ def initialize_config():
 
 
 # Historical convenience functions to access config entries
-def load_test_config():  # noqa: D103
+def load_test_config():
     """Historical load_test_config"""
     warnings.warn(
         "Accessing configuration method 'load_test_config' directly from the configuration module is "
@@ -888,7 +888,7 @@ def load_test_config():  # noqa: D103
     conf.load_test_config()
 
 
-def get(*args, **kwargs):  # noqa: D103
+def get(*args, **kwargs):
     """Historical get"""
     warnings.warn(
         "Accessing configuration method 'get' directly from the configuration module is "
@@ -900,7 +900,7 @@ def get(*args, **kwargs):  # noqa: D103
     return conf.get(*args, **kwargs)
 
 
-def getboolean(*args, **kwargs):  # noqa: D103
+def getboolean(*args, **kwargs):
     """Historical getboolean"""
     warnings.warn(
         "Accessing configuration method 'getboolean' directly from the configuration module is "
@@ -912,7 +912,7 @@ def getboolean(*args, **kwargs):  # noqa: D103
     return conf.getboolean(*args, **kwargs)
 
 
-def getfloat(*args, **kwargs):  # noqa: D103
+def getfloat(*args, **kwargs):
     """Historical getfloat"""
     warnings.warn(
         "Accessing configuration method 'getfloat' directly from the configuration module is "
@@ -924,7 +924,7 @@ def getfloat(*args, **kwargs):  # noqa: D103
     return conf.getfloat(*args, **kwargs)
 
 
-def getint(*args, **kwargs):  # noqa: D103
+def getint(*args, **kwargs):
     """Historical getint"""
     warnings.warn(
         "Accessing configuration method 'getint' directly from the configuration module is "
@@ -936,7 +936,7 @@ def getint(*args, **kwargs):  # noqa: D103
     return conf.getint(*args, **kwargs)
 
 
-def getsection(*args, **kwargs):  # noqa: D103
+def getsection(*args, **kwargs):
     """Historical getsection"""
     warnings.warn(
         "Accessing configuration method 'getsection' directly from the configuration module is "
@@ -948,7 +948,7 @@ def getsection(*args, **kwargs):  # noqa: D103
     return conf.getsection(*args, **kwargs)
 
 
-def has_option(*args, **kwargs):  # noqa: D103
+def has_option(*args, **kwargs):
     """Historical has_option"""
     warnings.warn(
         "Accessing configuration method 'has_option' directly from the configuration module is "
@@ -960,7 +960,7 @@ def has_option(*args, **kwargs):  # noqa: D103
     return conf.has_option(*args, **kwargs)
 
 
-def remove_option(*args, **kwargs):  # noqa: D103
+def remove_option(*args, **kwargs):
     """Historical remove_option"""
     warnings.warn(
         "Accessing configuration method 'remove_option' directly from the configuration module is "
@@ -972,7 +972,7 @@ def remove_option(*args, **kwargs):  # noqa: D103
     return conf.remove_option(*args, **kwargs)
 
 
-def as_dict(*args, **kwargs):  # noqa: D103
+def as_dict(*args, **kwargs):
     """Historical as_dict"""
     warnings.warn(
         "Accessing configuration method 'as_dict' directly from the configuration module is "
@@ -984,7 +984,7 @@ def as_dict(*args, **kwargs):  # noqa: D103
     return conf.as_dict(*args, **kwargs)
 
 
-def set(*args, **kwargs):  # noqa pylint: disable=redefined-builtin
+def set(*args, **kwargs):
     """Historical set"""
     warnings.warn(
         "Accessing configuration method 'set' directly from the configuration module is "
diff --git a/airflow/contrib/hooks/aws_athena_hook.py b/airflow/contrib/hooks/aws_athena_hook.py
index a00fadb..db1ecdf 100644
--- a/airflow/contrib/hooks/aws_athena_hook.py
+++ b/airflow/contrib/hooks/aws_athena_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.athena import AWSAthenaHook  # noqa
 
 warnings.warn(
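
All of the airflow.contrib modules below are the same backwards-compatibility shim: re-export the class from its new providers home (the # noqa keeps flake8 from flagging the unused import, the role the removed pylint disables also played) and warn on import. A generic sketch of such a shim module, with hypothetical old and new paths:

    """This module is deprecated. Please use `newpkg.hooks.example`."""
    import warnings

    from newpkg.hooks.example import ExampleHook  # noqa: F401  (re-export only)

    warnings.warn(
        "This module is deprecated. Please use `newpkg.hooks.example`.",
        DeprecationWarning,
        stacklevel=2,
    )
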
diff --git a/airflow/contrib/hooks/aws_datasync_hook.py b/airflow/contrib/hooks/aws_datasync_hook.py
index 68d74d8..0d48547 100644
--- a/airflow/contrib/hooks/aws_datasync_hook.py
+++ b/airflow/contrib/hooks/aws_datasync_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.datasync import AWSDataSyncHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_dynamodb_hook.py b/airflow/contrib/hooks/aws_dynamodb_hook.py
index 92153b9..dedb800 100644
--- a/airflow/contrib/hooks/aws_dynamodb_hook.py
+++ b/airflow/contrib/hooks/aws_dynamodb_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_firehose_hook.py b/airflow/contrib/hooks/aws_firehose_hook.py
index 4e98e28..c6d39cd 100644
--- a/airflow/contrib/hooks/aws_firehose_hook.py
+++ b/airflow/contrib/hooks/aws_firehose_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.kinesis import AwsFirehoseHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_glue_catalog_hook.py b/airflow/contrib/hooks/aws_glue_catalog_hook.py
index 3726488..703ba47 100644
--- a/airflow/contrib/hooks/aws_glue_catalog_hook.py
+++ b/airflow/contrib/hooks/aws_glue_catalog_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.glue_catalog import AwsGlueCatalogHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_hook.py b/airflow/contrib/hooks/aws_hook.py
index 1677982..c40e32c 100644
--- a/airflow/contrib/hooks/aws_hook.py
+++ b/airflow/contrib/hooks/aws_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook, _parse_s3_config, boto3  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_lambda_hook.py b/airflow/contrib/hooks/aws_lambda_hook.py
index b1987f1..379aaf5 100644
--- a/airflow/contrib/hooks/aws_lambda_hook.py
+++ b/airflow/contrib/hooks/aws_lambda_hook.py
@@ -23,7 +23,6 @@ Please use :mod:`airflow.providers.amazon.aws.hooks.lambda_function`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.lambda_function import AwsLambdaHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_logs_hook.py b/airflow/contrib/hooks/aws_logs_hook.py
index 88c03c8..9b9c449 100644
--- a/airflow/contrib/hooks/aws_logs_hook.py
+++ b/airflow/contrib/hooks/aws_logs_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_sns_hook.py b/airflow/contrib/hooks/aws_sns_hook.py
index 47e6410..b1318f5 100644
--- a/airflow/contrib/hooks/aws_sns_hook.py
+++ b/airflow/contrib/hooks/aws_sns_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.sns import AwsSnsHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/aws_sqs_hook.py b/airflow/contrib/hooks/aws_sqs_hook.py
index c504faa..66cc1fa 100644
--- a/airflow/contrib/hooks/aws_sqs_hook.py
+++ b/airflow/contrib/hooks/aws_sqs_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.sqs import SQSHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/azure_container_instance_hook.py b/airflow/contrib/hooks/azure_container_instance_hook.py
index 9ee03c7..5b40f9c 100644
--- a/airflow/contrib/hooks/azure_container_instance_hook.py
+++ b/airflow/contrib/hooks/azure_container_instance_hook.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.microsoft.azure.hooks.azure_container_instanc
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.azure_container_instance import (  # noqa
     AzureContainerInstanceHook,
 )
diff --git a/airflow/contrib/hooks/azure_container_registry_hook.py b/airflow/contrib/hooks/azure_container_registry_hook.py
index fe9cfdc..840cf89 100644
--- a/airflow/contrib/hooks/azure_container_registry_hook.py
+++ b/airflow/contrib/hooks/azure_container_registry_hook.py
@@ -22,7 +22,6 @@ Please use `airflow.providers.microsoft.azure.hooks.azure_container_registry`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.azure_container_registry import (  # noqa
     AzureContainerRegistryHook,
 )
diff --git a/airflow/contrib/hooks/azure_container_volume_hook.py b/airflow/contrib/hooks/azure_container_volume_hook.py
index b00c18a..4b325ad 100644
--- a/airflow/contrib/hooks/azure_container_volume_hook.py
+++ b/airflow/contrib/hooks/azure_container_volume_hook.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.microsoft.azure.hooks.azure_container_volume`
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.azure_container_volume import AzureContainerVolumeHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/azure_cosmos_hook.py b/airflow/contrib/hooks/azure_cosmos_hook.py
index d449507..26abe61 100644
--- a/airflow/contrib/hooks/azure_cosmos_hook.py
+++ b/airflow/contrib/hooks/azure_cosmos_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.azure_cosmos import AzureCosmosDBHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/azure_data_lake_hook.py b/airflow/contrib/hooks/azure_data_lake_hook.py
index ff63e56..a89961d 100644
--- a/airflow/contrib/hooks/azure_data_lake_hook.py
+++ b/airflow/contrib/hooks/azure_data_lake_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.azure_data_lake import AzureDataLakeHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/azure_fileshare_hook.py b/airflow/contrib/hooks/azure_fileshare_hook.py
index a69e57b..2c49d41 100644
--- a/airflow/contrib/hooks/azure_fileshare_hook.py
+++ b/airflow/contrib/hooks/azure_fileshare_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.azure_fileshare import AzureFileShareHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py
index 64e8672..8a90ef8 100644
--- a/airflow/contrib/hooks/bigquery_hook.py
+++ b/airflow/contrib/hooks/bigquery_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.bigquery import (  # noqa
     BigQueryBaseCursor,
     BigQueryConnection,
diff --git a/airflow/contrib/hooks/cassandra_hook.py b/airflow/contrib/hooks/cassandra_hook.py
index 38efbce..ea4c748 100644
--- a/airflow/contrib/hooks/cassandra_hook.py
+++ b/airflow/contrib/hooks/cassandra_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/cloudant_hook.py b/airflow/contrib/hooks/cloudant_hook.py
index 9a68195..ab7a1fa 100644
--- a/airflow/contrib/hooks/cloudant_hook.py
+++ b/airflow/contrib/hooks/cloudant_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.cloudant.hooks.cloudant import CloudantHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/databricks_hook.py b/airflow/contrib/hooks/databricks_hook.py
index 7d12d84..ce77751 100644
--- a/airflow/contrib/hooks/databricks_hook.py
+++ b/airflow/contrib/hooks/databricks_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.databricks.hooks.databricks import (  # noqa
     CANCEL_RUN_ENDPOINT,
     GET_RUN_ENDPOINT,
diff --git a/airflow/contrib/hooks/datadog_hook.py b/airflow/contrib/hooks/datadog_hook.py
index 728dc92..be275e9 100644
--- a/airflow/contrib/hooks/datadog_hook.py
+++ b/airflow/contrib/hooks/datadog_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.datadog.hooks.datadog import DatadogHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/datastore_hook.py b/airflow/contrib/hooks/datastore_hook.py
index 31300ac..9898e2f 100644
--- a/airflow/contrib/hooks/datastore_hook.py
+++ b/airflow/contrib/hooks/datastore_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.datastore import DatastoreHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/dingding_hook.py b/airflow/contrib/hooks/dingding_hook.py
index cde1b09..deff041 100644
--- a/airflow/contrib/hooks/dingding_hook.py
+++ b/airflow/contrib/hooks/dingding_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.dingding.hooks.dingding import DingdingHook, requests  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/discord_webhook_hook.py b/airflow/contrib/hooks/discord_webhook_hook.py
index a9aca47..a907d21 100644
--- a/airflow/contrib/hooks/discord_webhook_hook.py
+++ b/airflow/contrib/hooks/discord_webhook_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.discord.hooks.discord_webhook import DiscordWebhookHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/emr_hook.py b/airflow/contrib/hooks/emr_hook.py
index 3278cd5..1a15ee3 100644
--- a/airflow/contrib/hooks/emr_hook.py
+++ b/airflow/contrib/hooks/emr_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.emr import EmrHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/fs_hook.py b/airflow/contrib/hooks/fs_hook.py
index 35178fd..bc247c1 100644
--- a/airflow/contrib/hooks/fs_hook.py
+++ b/airflow/contrib/hooks/fs_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.hooks.filesystem import FSHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/ftp_hook.py b/airflow/contrib/hooks/ftp_hook.py
index 1ff3980..8d2e9cb 100644
--- a/airflow/contrib/hooks/ftp_hook.py
+++ b/airflow/contrib/hooks/ftp_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.ftp.hooks.ftp import FTPHook, FTPSHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_bigtable_hook.py b/airflow/contrib/hooks/gcp_bigtable_hook.py
index 23ba275..47ccd24 100644
--- a/airflow/contrib/hooks/gcp_bigtable_hook.py
+++ b/airflow/contrib/hooks/gcp_bigtable_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.bigtable import BigtableHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_cloud_build_hook.py b/airflow/contrib/hooks/gcp_cloud_build_hook.py
index 11e8902..691ae72 100644
--- a/airflow/contrib/hooks/gcp_cloud_build_hook.py
+++ b/airflow/contrib/hooks/gcp_cloud_build_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.cloud_build import CloudBuildHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_dlp_hook.py b/airflow/contrib/hooks/gcp_dlp_hook.py
index d35009f..77a9da6 100644
--- a/airflow/contrib/hooks/gcp_dlp_hook.py
+++ b/airflow/contrib/hooks/gcp_dlp_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.dlp import CloudDLPHook, DlpJob  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_mlengine_hook.py b/airflow/contrib/hooks/gcp_mlengine_hook.py
index f154786..57978e0 100644
--- a/airflow/contrib/hooks/gcp_mlengine_hook.py
+++ b/airflow/contrib/hooks/gcp_mlengine_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.mlengine import MLEngineHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_natural_language_hook.py b/airflow/contrib/hooks/gcp_natural_language_hook.py
index 08035ed..86ee9f8 100644
--- a/airflow/contrib/hooks/gcp_natural_language_hook.py
+++ b/airflow/contrib/hooks/gcp_natural_language_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.natural_language import CloudNaturalLanguageHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_pubsub_hook.py b/airflow/contrib/hooks/gcp_pubsub_hook.py
index 29c979f..677a0f0 100644
--- a/airflow/contrib/hooks/gcp_pubsub_hook.py
+++ b/airflow/contrib/hooks/gcp_pubsub_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.pubsub import PubSubException, PubSubHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_tasks_hook.py b/airflow/contrib/hooks/gcp_tasks_hook.py
index ecb7980..1753b2a 100644
--- a/airflow/contrib/hooks/gcp_tasks_hook.py
+++ b/airflow/contrib/hooks/gcp_tasks_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_translate_hook.py b/airflow/contrib/hooks/gcp_translate_hook.py
index db9d54c..1b0cec8 100644
--- a/airflow/contrib/hooks/gcp_translate_hook.py
+++ b/airflow/contrib/hooks/gcp_translate_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.translate import CloudTranslateHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_video_intelligence_hook.py b/airflow/contrib/hooks/gcp_video_intelligence_hook.py
index 6312683..a71ef46 100644
--- a/airflow/contrib/hooks/gcp_video_intelligence_hook.py
+++ b/airflow/contrib/hooks/gcp_video_intelligence_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.video_intelligence import CloudVideoIntelligenceHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gcp_vision_hook.py b/airflow/contrib/hooks/gcp_vision_hook.py
index 1690059..52f47f4 100644
--- a/airflow/contrib/hooks/gcp_vision_hook.py
+++ b/airflow/contrib/hooks/gcp_vision_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.hooks.vision import CloudVisionHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/gdrive_hook.py b/airflow/contrib/hooks/gdrive_hook.py
index f5b687a..dad8459 100644
--- a/airflow/contrib/hooks/gdrive_hook.py
+++ b/airflow/contrib/hooks/gdrive_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.suite.hooks.drive import GoogleDriveHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/grpc_hook.py b/airflow/contrib/hooks/grpc_hook.py
index 41c0282..f7aa6e2 100644
--- a/airflow/contrib/hooks/grpc_hook.py
+++ b/airflow/contrib/hooks/grpc_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.grpc.hooks.grpc import GrpcHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/imap_hook.py b/airflow/contrib/hooks/imap_hook.py
index 0332803..5770396 100644
--- a/airflow/contrib/hooks/imap_hook.py
+++ b/airflow/contrib/hooks/imap_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.imap.hooks.imap import ImapHook, Mail, MailPart  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/jenkins_hook.py b/airflow/contrib/hooks/jenkins_hook.py
index b00fdcc..178474e 100644
--- a/airflow/contrib/hooks/jenkins_hook.py
+++ b/airflow/contrib/hooks/jenkins_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jenkins.hooks.jenkins import JenkinsHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/jira_hook.py b/airflow/contrib/hooks/jira_hook.py
index 69113eb..8f9d467 100644
--- a/airflow/contrib/hooks/jira_hook.py
+++ b/airflow/contrib/hooks/jira_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jira.hooks.jira import JiraHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/mongo_hook.py b/airflow/contrib/hooks/mongo_hook.py
index 6f30b7f..63f6eea 100644
--- a/airflow/contrib/hooks/mongo_hook.py
+++ b/airflow/contrib/hooks/mongo_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.mongo.hooks.mongo import MongoHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/openfaas_hook.py b/airflow/contrib/hooks/openfaas_hook.py
index 03f2458..a0e71ff 100644
--- a/airflow/contrib/hooks/openfaas_hook.py
+++ b/airflow/contrib/hooks/openfaas_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.openfaas.hooks.openfaas import OK_STATUS_CODE, OpenFaasHook, requests  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/opsgenie_alert_hook.py b/airflow/contrib/hooks/opsgenie_alert_hook.py
index 41908cf..a65de9c 100644
--- a/airflow/contrib/hooks/opsgenie_alert_hook.py
+++ b/airflow/contrib/hooks/opsgenie_alert_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.opsgenie.hooks.opsgenie_alert import OpsgenieAlertHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/pagerduty_hook.py b/airflow/contrib/hooks/pagerduty_hook.py
index facf44e..33797b0 100644
--- a/airflow/contrib/hooks/pagerduty_hook.py
+++ b/airflow/contrib/hooks/pagerduty_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.pagerduty.hooks.pagerduty import PagerdutyHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/pinot_hook.py b/airflow/contrib/hooks/pinot_hook.py
index 43c5f86..159677f 100644
--- a/airflow/contrib/hooks/pinot_hook.py
+++ b/airflow/contrib/hooks/pinot_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.pinot.hooks.pinot import PinotAdminHook, PinotDbApiHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/qubole_check_hook.py b/airflow/contrib/hooks/qubole_check_hook.py
index 86479f3..0a674d7 100644
--- a/airflow/contrib/hooks/qubole_check_hook.py
+++ b/airflow/contrib/hooks/qubole_check_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.qubole.hooks.qubole_check import QuboleCheckHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py
index ba50756..6a695bc 100644
--- a/airflow/contrib/hooks/qubole_hook.py
+++ b/airflow/contrib/hooks/qubole_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.qubole.hooks.qubole import QuboleHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/redis_hook.py b/airflow/contrib/hooks/redis_hook.py
index 69390a1..57bdab5 100644
--- a/airflow/contrib/hooks/redis_hook.py
+++ b/airflow/contrib/hooks/redis_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.redis.hooks.redis import RedisHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/redshift_hook.py b/airflow/contrib/hooks/redshift_hook.py
index 3cdda45..ebaac1b 100644
--- a/airflow/contrib/hooks/redshift_hook.py
+++ b/airflow/contrib/hooks/redshift_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.redshift import RedshiftHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/sagemaker_hook.py b/airflow/contrib/hooks/sagemaker_hook.py
index 002a39a..321f25b 100644
--- a/airflow/contrib/hooks/sagemaker_hook.py
+++ b/airflow/contrib/hooks/sagemaker_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.sagemaker import (  # noqa
     LogState,
     Position,
diff --git a/airflow/contrib/hooks/salesforce_hook.py b/airflow/contrib/hooks/salesforce_hook.py
index bcde12d..a707a52 100644
--- a/airflow/contrib/hooks/salesforce_hook.py
+++ b/airflow/contrib/hooks/salesforce_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.salesforce.hooks.salesforce import SalesforceHook, pd  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/segment_hook.py b/airflow/contrib/hooks/segment_hook.py
index e702817..6da6257 100644
--- a/airflow/contrib/hooks/segment_hook.py
+++ b/airflow/contrib/hooks/segment_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.segment.hooks.segment import SegmentHook, analytics  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/sftp_hook.py b/airflow/contrib/hooks/sftp_hook.py
index 7b91ee3..0153e8e 100644
--- a/airflow/contrib/hooks/sftp_hook.py
+++ b/airflow/contrib/hooks/sftp_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.sftp.hooks.sftp import SFTPHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/slack_webhook_hook.py b/airflow/contrib/hooks/slack_webhook_hook.py
index 68e4195..f438d11 100644
--- a/airflow/contrib/hooks/slack_webhook_hook.py
+++ b/airflow/contrib/hooks/slack_webhook_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/snowflake_hook.py b/airflow/contrib/hooks/snowflake_hook.py
index afda9e1..804bacc 100644
--- a/airflow/contrib/hooks/snowflake_hook.py
+++ b/airflow/contrib/hooks/snowflake_hook.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/spark_jdbc_hook.py b/airflow/contrib/hooks/spark_jdbc_hook.py
index ca29699..1b48d09 100644
--- a/airflow/contrib/hooks/spark_jdbc_hook.py
+++ b/airflow/contrib/hooks/spark_jdbc_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.spark.hooks.spark_jdbc import SparkJDBCHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/spark_sql_hook.py b/airflow/contrib/hooks/spark_sql_hook.py
index cbac63f..6b262ed 100644
--- a/airflow/contrib/hooks/spark_sql_hook.py
+++ b/airflow/contrib/hooks/spark_sql_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.spark.hooks.spark_sql import SparkSqlHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py
index 9a30d77..fbdbf4f 100644
--- a/airflow/contrib/hooks/spark_submit_hook.py
+++ b/airflow/contrib/hooks/spark_submit_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/sqoop_hook.py b/airflow/contrib/hooks/sqoop_hook.py
index 07d07de..f231c0f 100644
--- a/airflow/contrib/hooks/sqoop_hook.py
+++ b/airflow/contrib/hooks/sqoop_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/ssh_hook.py b/airflow/contrib/hooks/ssh_hook.py
index 75cb320..ef3000d 100644
--- a/airflow/contrib/hooks/ssh_hook.py
+++ b/airflow/contrib/hooks/ssh_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.ssh.hooks.ssh import SSHHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/vertica_hook.py b/airflow/contrib/hooks/vertica_hook.py
index 4aff07c..fc84b22 100644
--- a/airflow/contrib/hooks/vertica_hook.py
+++ b/airflow/contrib/hooks/vertica_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.vertica.hooks.vertica import VerticaHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/wasb_hook.py b/airflow/contrib/hooks/wasb_hook.py
index d256aef..3b5eb65 100644
--- a/airflow/contrib/hooks/wasb_hook.py
+++ b/airflow/contrib/hooks/wasb_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.hooks.wasb import WasbHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/hooks/winrm_hook.py b/airflow/contrib/hooks/winrm_hook.py
index e22f3b6..35e7db2 100644
--- a/airflow/contrib/hooks/winrm_hook.py
+++ b/airflow/contrib/hooks/winrm_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.winrm.hooks.winrm import WinRMHook  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/adls_list_operator.py b/airflow/contrib/operators/adls_list_operator.py
index cb43574..b924342 100644
--- a/airflow/contrib/operators/adls_list_operator.py
+++ b/airflow/contrib/operators/adls_list_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.operators.adls_list import AzureDataLakeStorageListOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/aws_athena_operator.py b/airflow/contrib/operators/aws_athena_operator.py
index 97c12c9..e799c74 100644
--- a/airflow/contrib/operators/aws_athena_operator.py
+++ b/airflow/contrib/operators/aws_athena_operator.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.athena import AWSAthenaOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/aws_sqs_publish_operator.py b/airflow/contrib/operators/aws_sqs_publish_operator.py
index c7c2ca3..b397d9e 100644
--- a/airflow/contrib/operators/aws_sqs_publish_operator.py
+++ b/airflow/contrib/operators/aws_sqs_publish_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sqs import SQSPublishOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/awsbatch_operator.py b/airflow/contrib/operators/awsbatch_operator.py
index 0d1c5b0..a6be224 100644
--- a/airflow/contrib/operators/awsbatch_operator.py
+++ b/airflow/contrib/operators/awsbatch_operator.py
@@ -41,7 +41,6 @@ warnings.warn(
 )
 
 
-# pylint: disable=too-many-ancestors
 class AWSBatchOperator(AwsBatchOperator):
     """
     This class is deprecated. Please use
diff --git a/airflow/contrib/operators/azure_container_instances_operator.py b/airflow/contrib/operators/azure_container_instances_operator.py
index 7efd008..8b6a32a 100644
--- a/airflow/contrib/operators/azure_container_instances_operator.py
+++ b/airflow/contrib/operators/azure_container_instances_operator.py
@@ -21,7 +21,6 @@ This module is deprecated. Please use
 """
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.operators.azure_container_instances import (  # noqa
     AzureContainerInstancesOperator,
 )
diff --git a/airflow/contrib/operators/azure_cosmos_operator.py b/airflow/contrib/operators/azure_cosmos_operator.py
index f4bec65..6c08728 100644
--- a/airflow/contrib/operators/azure_cosmos_operator.py
+++ b/airflow/contrib/operators/azure_cosmos_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.operators.azure_cosmos import AzureCosmosInsertDocumentOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/bigquery_check_operator.py b/airflow/contrib/operators/bigquery_check_operator.py
index ff021fb..39b658c 100644
--- a/airflow/contrib/operators/bigquery_check_operator.py
+++ b/airflow/contrib/operators/bigquery_check_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.bigquery import (  # noqa
     BigQueryCheckOperator,
     BigQueryIntervalCheckOperator,
diff --git a/airflow/contrib/operators/bigquery_get_data.py b/airflow/contrib/operators/bigquery_get_data.py
index 8434cb4..00c8575 100644
--- a/airflow/contrib/operators/bigquery_get_data.py
+++ b/airflow/contrib/operators/bigquery_get_data.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.bigquery import BigQueryGetDataOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/bigquery_operator.py b/airflow/contrib/operators/bigquery_operator.py
index ca9a845..6fe8f08 100644
--- a/airflow/contrib/operators/bigquery_operator.py
+++ b/airflow/contrib/operators/bigquery_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.bigquery import (  # noqa; noqa; noqa; noqa; noqa
     BigQueryCreateEmptyDatasetOperator,
     BigQueryCreateEmptyTableOperator,
diff --git a/airflow/contrib/operators/bigquery_to_bigquery.py b/airflow/contrib/operators/bigquery_to_bigquery.py
index 43c05ec..84c26fb 100644
--- a/airflow/contrib/operators/bigquery_to_bigquery.py
+++ b/airflow/contrib/operators/bigquery_to_bigquery.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.google.cloud.transfers.bigquery_to_bigquery`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/bigquery_to_mysql_operator.py b/airflow/contrib/operators/bigquery_to_mysql_operator.py
index bd9f0ac..401921c 100644
--- a/airflow/contrib/operators/bigquery_to_mysql_operator.py
+++ b/airflow/contrib/operators/bigquery_to_mysql_operator.py
@@ -21,7 +21,6 @@ Please use :mod:`airflow.providers.google.cloud.transfers.bigquery_to_mysql`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/databricks_operator.py b/airflow/contrib/operators/databricks_operator.py
index 90edb60..b591dd6 100644
--- a/airflow/contrib/operators/databricks_operator.py
+++ b/airflow/contrib/operators/databricks_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.databricks.operators.databricks import (  # noqa
     DatabricksRunNowOperator,
     DatabricksSubmitRunOperator,
diff --git a/airflow/contrib/operators/dingding_operator.py b/airflow/contrib/operators/dingding_operator.py
index 7abbac3..bfe91e8 100644
--- a/airflow/contrib/operators/dingding_operator.py
+++ b/airflow/contrib/operators/dingding_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.dingding.operators.dingding import DingdingOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/discord_webhook_operator.py b/airflow/contrib/operators/discord_webhook_operator.py
index 2dcc117..be5809a 100644
--- a/airflow/contrib/operators/discord_webhook_operator.py
+++ b/airflow/contrib/operators/discord_webhook_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.discord.operators.discord_webhook import DiscordWebhookOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/docker_swarm_operator.py b/airflow/contrib/operators/docker_swarm_operator.py
index 041f4fd..b023da7 100644
--- a/airflow/contrib/operators/docker_swarm_operator.py
+++ b/airflow/contrib/operators/docker_swarm_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.docker.operators.docker_swarm import DockerSwarmOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/druid_operator.py b/airflow/contrib/operators/druid_operator.py
index 1a35b0f..20dff77 100644
--- a/airflow/contrib/operators/druid_operator.py
+++ b/airflow/contrib/operators/druid_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.druid.operators.druid import DruidOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/dynamodb_to_s3.py b/airflow/contrib/operators/dynamodb_to_s3.py
index 3785d8e..a205400 100644
--- a/airflow/contrib/operators/dynamodb_to_s3.py
+++ b/airflow/contrib/operators/dynamodb_to_s3.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/ecs_operator.py b/airflow/contrib/operators/ecs_operator.py
index a7865b9..e460a6a 100644
--- a/airflow/contrib/operators/ecs_operator.py
+++ b/airflow/contrib/operators/ecs_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.ecs import ECSOperator, ECSProtocol as NewECSProtocol  # noqa
 from airflow.typing_compat import Protocol, runtime_checkable
 
diff --git a/airflow/contrib/operators/emr_add_steps_operator.py b/airflow/contrib/operators/emr_add_steps_operator.py
index e03c0c6..e53f284 100644
--- a/airflow/contrib/operators/emr_add_steps_operator.py
+++ b/airflow/contrib/operators/emr_add_steps_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/emr_create_job_flow_operator.py b/airflow/contrib/operators/emr_create_job_flow_operator.py
index 95950e7..14344a9 100644
--- a/airflow/contrib/operators/emr_create_job_flow_operator.py
+++ b/airflow/contrib/operators/emr_create_job_flow_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.emr_create_job_flow import EmrCreateJobFlowOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/emr_terminate_job_flow_operator.py b/airflow/contrib/operators/emr_terminate_job_flow_operator.py
index 86e0849..7c73bc3 100644
--- a/airflow/contrib/operators/emr_terminate_job_flow_operator.py
+++ b/airflow/contrib/operators/emr_terminate_job_flow_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.amazon.aws.operators.emr_terminate_job_flow`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.emr_terminate_job_flow import EmrTerminateJobFlowOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/file_to_wasb.py b/airflow/contrib/operators/file_to_wasb.py
index d288c25..2af0476 100644
--- a/airflow/contrib/operators/file_to_wasb.py
+++ b/airflow/contrib/operators/file_to_wasb.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.microsoft.azure.transfers.file_to_wasb`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.transfers.file_to_wasb import FileToWasbOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/gcp_bigtable_operator.py b/airflow/contrib/operators/gcp_bigtable_operator.py
index b07a04c..f45fde7 100644
--- a/airflow/contrib/operators/gcp_bigtable_operator.py
+++ b/airflow/contrib/operators/gcp_bigtable_operator.py
@@ -119,7 +119,6 @@ class BigtableTableDeleteOperator(BigtableDeleteTableOperator):
         super().__init__(*args, **kwargs)
 
 
-# pylint: disable=too-many-ancestors
 class BigtableTableWaitForReplicationSensor(BigtableTableReplicationCompletedSensor):
     """
     This class is deprecated.
diff --git a/airflow/contrib/operators/gcp_cloud_build_operator.py b/airflow/contrib/operators/gcp_cloud_build_operator.py
index b07a295..443fdbf 100644
--- a/airflow/contrib/operators/gcp_cloud_build_operator.py
+++ b/airflow/contrib/operators/gcp_cloud_build_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.cloud_build import CloudBuildCreateBuildOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/gcp_dlp_operator.py b/airflow/contrib/operators/gcp_dlp_operator.py
index a5f4cae..f5b4c07 100644
--- a/airflow/contrib/operators/gcp_dlp_operator.py
+++ b/airflow/contrib/operators/gcp_dlp_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.dlp import (  # noqa
     CloudDLPCancelDLPJobOperator,
     CloudDLPCreateDeidentifyTemplateOperator,
diff --git a/airflow/contrib/operators/gcp_tasks_operator.py b/airflow/contrib/operators/gcp_tasks_operator.py
index 62ea14c..319ddb4 100644
--- a/airflow/contrib/operators/gcp_tasks_operator.py
+++ b/airflow/contrib/operators/gcp_tasks_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.tasks import (  # noqa
     CloudTasksQueueCreateOperator,
     CloudTasksQueueDeleteOperator,
diff --git a/airflow/contrib/operators/gcp_translate_operator.py b/airflow/contrib/operators/gcp_translate_operator.py
index dc1caca..c61cc84 100644
--- a/airflow/contrib/operators/gcp_translate_operator.py
+++ b/airflow/contrib/operators/gcp_translate_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.translate import CloudTranslateTextOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/gcp_translate_speech_operator.py b/airflow/contrib/operators/gcp_translate_speech_operator.py
index f8f8cc5..2e0bb70 100644
--- a/airflow/contrib/operators/gcp_translate_speech_operator.py
+++ b/airflow/contrib/operators/gcp_translate_speech_operator.py
@@ -22,8 +22,7 @@ Please use :mod:`airflow.providers.google.cloud.operators.translate_speech`.
 
 import warnings
 
-# pylint: disable=unused-import
-from airflow.providers.google.cloud.operators.translate_speech import CloudTranslateSpeechOperator  # noqa
+from airflow.providers.google.cloud.operators.translate_speech import CloudTranslateSpeechOperator
 
 warnings.warn(
     "This module is deprecated. Please use `airflow.providers.google.cloud.operators.translate_speech`.",
diff --git a/airflow/contrib/operators/gcp_video_intelligence_operator.py b/airflow/contrib/operators/gcp_video_intelligence_operator.py
index edc9539..a82fc9a 100644
--- a/airflow/contrib/operators/gcp_video_intelligence_operator.py
+++ b/airflow/contrib/operators/gcp_video_intelligence_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.google.cloud.operators.video_intelligence`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.operators.video_intelligence import (  # noqa
     CloudVideoIntelligenceDetectVideoExplicitContentOperator,
     CloudVideoIntelligenceDetectVideoLabelsOperator,
diff --git a/airflow/contrib/operators/gcp_vision_operator.py b/airflow/contrib/operators/gcp_vision_operator.py
index 18424cf..09a5b1e 100644
--- a/airflow/contrib/operators/gcp_vision_operator.py
+++ b/airflow/contrib/operators/gcp_vision_operator.py
@@ -19,7 +19,7 @@
 
 import warnings
 
-from airflow.providers.google.cloud.operators.vision import (  # noqa # pylint: disable=unused-import
+from airflow.providers.google.cloud.operators.vision import (  # noqa
     CloudVisionAddProductToProductSetOperator,
     CloudVisionCreateProductOperator,
     CloudVisionCreateProductSetOperator,
diff --git a/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py b/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py
index 99d7ca2..75a672f 100644
--- a/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py
+++ b/airflow/contrib/operators/gcs_to_gcs_transfer_operator.py
@@ -22,8 +22,6 @@ Please use `airflow.providers.google.cloud.operators.cloud_storage_transfer_serv
 
 import warnings
 
-# pylint: disable=unused-import
-
 warnings.warn(
     "This module is deprecated. "
     "Please use `airflow.providers.google.cloud.operators.cloud_storage_transfer_service`.",
diff --git a/airflow/contrib/operators/gcs_to_gdrive_operator.py b/airflow/contrib/operators/gcs_to_gdrive_operator.py
index 72627ce..1fb55d1 100644
--- a/airflow/contrib/operators/gcs_to_gdrive_operator.py
+++ b/airflow/contrib/operators/gcs_to_gdrive_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/grpc_operator.py b/airflow/contrib/operators/grpc_operator.py
index dbf49c6..bd8cfbd 100644
--- a/airflow/contrib/operators/grpc_operator.py
+++ b/airflow/contrib/operators/grpc_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.grpc.operators.grpc import GrpcOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/hive_to_dynamodb.py b/airflow/contrib/operators/hive_to_dynamodb.py
index 3cd467e..ba4f8b9 100644
--- a/airflow/contrib/operators/hive_to_dynamodb.py
+++ b/airflow/contrib/operators/hive_to_dynamodb.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/imap_attachment_to_s3_operator.py b/airflow/contrib/operators/imap_attachment_to_s3_operator.py
index ea88e6e..e82a8bc 100644
--- a/airflow/contrib/operators/imap_attachment_to_s3_operator.py
+++ b/airflow/contrib/operators/imap_attachment_to_s3_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/jenkins_job_trigger_operator.py b/airflow/contrib/operators/jenkins_job_trigger_operator.py
index 9fde004..0b401d2 100644
--- a/airflow/contrib/operators/jenkins_job_trigger_operator.py
+++ b/airflow/contrib/operators/jenkins_job_trigger_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jenkins.operators.jenkins_job_trigger import JenkinsJobTriggerOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/jira_operator.py b/airflow/contrib/operators/jira_operator.py
index a9a919b..b6e3b3e 100644
--- a/airflow/contrib/operators/jira_operator.py
+++ b/airflow/contrib/operators/jira_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jira.operators.jira import JiraOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/kubernetes_pod_operator.py b/airflow/contrib/operators/kubernetes_pod_operator.py
index e4d264f..962fa22 100644
--- a/airflow/contrib/operators/kubernetes_pod_operator.py
+++ b/airflow/contrib/operators/kubernetes_pod_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.cncf.kubernetes.operators.kubernetes_pod`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/mongo_to_s3.py b/airflow/contrib/operators/mongo_to_s3.py
index 6a76e3b..17b0676 100644
--- a/airflow/contrib/operators/mongo_to_s3.py
+++ b/airflow/contrib/operators/mongo_to_s3.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/opsgenie_alert_operator.py b/airflow/contrib/operators/opsgenie_alert_operator.py
index de3459f..b8ce1dd 100644
--- a/airflow/contrib/operators/opsgenie_alert_operator.py
+++ b/airflow/contrib/operators/opsgenie_alert_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.opsgenie.operators.opsgenie_alert import OpsgenieAlertOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
index c27d948..3907b6f 100644
--- a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
+++ b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
@@ -22,7 +22,6 @@ Please use `airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lak
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import (  # noqa
     OracleToAzureDataLakeOperator,
 )
diff --git a/airflow/contrib/operators/oracle_to_oracle_transfer.py b/airflow/contrib/operators/oracle_to_oracle_transfer.py
index 905cba5..2efbf4d 100644
--- a/airflow/contrib/operators/oracle_to_oracle_transfer.py
+++ b/airflow/contrib/operators/oracle_to_oracle_transfer.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.oracle.transfers.oracle_to_oracle`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.oracle.transfers.oracle_to_oracle import OracleToOracleOperator
 
 warnings.warn(
diff --git a/airflow/contrib/operators/qubole_check_operator.py b/airflow/contrib/operators/qubole_check_operator.py
index 14f5bd9..e42a9e0 100644
--- a/airflow/contrib/operators/qubole_check_operator.py
+++ b/airflow/contrib/operators/qubole_check_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.qubole.operators.qubole_check import (  # noqa
     QuboleCheckOperator,
     QuboleValueCheckOperator,
diff --git a/airflow/contrib/operators/qubole_operator.py b/airflow/contrib/operators/qubole_operator.py
index 171e275..e4a3074 100644
--- a/airflow/contrib/operators/qubole_operator.py
+++ b/airflow/contrib/operators/qubole_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.qubole.operators.qubole import QuboleOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/redis_publish_operator.py b/airflow/contrib/operators/redis_publish_operator.py
index 12f9261..994d932 100644
--- a/airflow/contrib/operators/redis_publish_operator.py
+++ b/airflow/contrib/operators/redis_publish_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.redis.operators.redis_publish import RedisPublishOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/s3_copy_object_operator.py b/airflow/contrib/operators/s3_copy_object_operator.py
index 287c081..cbe9c63 100644
--- a/airflow/contrib/operators/s3_copy_object_operator.py
+++ b/airflow/contrib/operators/s3_copy_object_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.s3_copy_object import S3CopyObjectOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/s3_delete_objects_operator.py b/airflow/contrib/operators/s3_delete_objects_operator.py
index 2e55dc5..a0ab210 100644
--- a/airflow/contrib/operators/s3_delete_objects_operator.py
+++ b/airflow/contrib/operators/s3_delete_objects_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.amazon.aws.operators.s3_delete_objects`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.s3_delete_objects import S3DeleteObjectsOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/s3_list_operator.py b/airflow/contrib/operators/s3_list_operator.py
index b9eb354..172b94c 100644
--- a/airflow/contrib/operators/s3_list_operator.py
+++ b/airflow/contrib/operators/s3_list_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.s3_list import S3ListOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/s3_to_gcs_operator.py b/airflow/contrib/operators/s3_to_gcs_operator.py
index 18cfcd3..d0ea8e0 100644
--- a/airflow/contrib/operators/s3_to_gcs_operator.py
+++ b/airflow/contrib/operators/s3_to_gcs_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/s3_to_gcs_transfer_operator.py b/airflow/contrib/operators/s3_to_gcs_transfer_operator.py
index d82657b..71df062 100644
--- a/airflow/contrib/operators/s3_to_gcs_transfer_operator.py
+++ b/airflow/contrib/operators/s3_to_gcs_transfer_operator.py
@@ -21,7 +21,6 @@ Please use `airflow.providers.google.cloud.operators.cloud_storage_transfer_serv
 """
 import warnings
 
-# pylint: disable=unused-import,line-too-long
 from airflow.providers.google.cloud.operators.cloud_storage_transfer_service import (  # noqa isort:skip
     CloudDataTransferServiceS3ToGCSOperator,
 )
diff --git a/airflow/contrib/operators/s3_to_sftp_operator.py b/airflow/contrib/operators/s3_to_sftp_operator.py
index 5de4330..e129af1 100644
--- a/airflow/contrib/operators/s3_to_sftp_operator.py
+++ b/airflow/contrib/operators/s3_to_sftp_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sagemaker_base_operator.py b/airflow/contrib/operators/sagemaker_base_operator.py
index 551502f..4c2c8f6 100644
--- a/airflow/contrib/operators/sagemaker_base_operator.py
+++ b/airflow/contrib/operators/sagemaker_base_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_base import SageMakerBaseOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sagemaker_endpoint_config_operator.py b/airflow/contrib/operators/sagemaker_endpoint_config_operator.py
index cf828d4..43945b2 100644
--- a/airflow/contrib/operators/sagemaker_endpoint_config_operator.py
+++ b/airflow/contrib/operators/sagemaker_endpoint_config_operator.py
@@ -22,7 +22,6 @@ Please use `airflow.providers.amazon.aws.operators.sagemaker_endpoint_config`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_endpoint_config import (  # noqa
     SageMakerEndpointConfigOperator,
 )
diff --git a/airflow/contrib/operators/sagemaker_endpoint_operator.py b/airflow/contrib/operators/sagemaker_endpoint_operator.py
index 363bea1..fe175a6 100644
--- a/airflow/contrib/operators/sagemaker_endpoint_operator.py
+++ b/airflow/contrib/operators/sagemaker_endpoint_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.amazon.aws.operators.sagemaker_endpoint`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_endpoint import SageMakerEndpointOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sagemaker_model_operator.py b/airflow/contrib/operators/sagemaker_model_operator.py
index efcfc58..9a00348 100644
--- a/airflow/contrib/operators/sagemaker_model_operator.py
+++ b/airflow/contrib/operators/sagemaker_model_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_model import SageMakerModelOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sagemaker_training_operator.py b/airflow/contrib/operators/sagemaker_training_operator.py
index 6502df0..d3749c6 100644
--- a/airflow/contrib/operators/sagemaker_training_operator.py
+++ b/airflow/contrib/operators/sagemaker_training_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_training import SageMakerTrainingOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sagemaker_transform_operator.py b/airflow/contrib/operators/sagemaker_transform_operator.py
index ca8d874..93cf707 100644
--- a/airflow/contrib/operators/sagemaker_transform_operator.py
+++ b/airflow/contrib/operators/sagemaker_transform_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_transform import SageMakerTransformOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sagemaker_tuning_operator.py b/airflow/contrib/operators/sagemaker_tuning_operator.py
index 8885189..05760a7 100644
--- a/airflow/contrib/operators/sagemaker_tuning_operator.py
+++ b/airflow/contrib/operators/sagemaker_tuning_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sagemaker_tuning import SageMakerTuningOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/segment_track_event_operator.py b/airflow/contrib/operators/segment_track_event_operator.py
index 759f4b9..92419a1 100644
--- a/airflow/contrib/operators/segment_track_event_operator.py
+++ b/airflow/contrib/operators/segment_track_event_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.segment.operators.segment_track_event import SegmentTrackEventOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sftp_operator.py b/airflow/contrib/operators/sftp_operator.py
index 0cd44ee..e73a847 100644
--- a/airflow/contrib/operators/sftp_operator.py
+++ b/airflow/contrib/operators/sftp_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.sftp.operators.sftp import SFTPOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sftp_to_s3_operator.py b/airflow/contrib/operators/sftp_to_s3_operator.py
index c5f1f17..7c13b18 100644
--- a/airflow/contrib/operators/sftp_to_s3_operator.py
+++ b/airflow/contrib/operators/sftp_to_s3_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/slack_webhook_operator.py b/airflow/contrib/operators/slack_webhook_operator.py
index f50ba53..f271102 100644
--- a/airflow/contrib/operators/slack_webhook_operator.py
+++ b/airflow/contrib/operators/slack_webhook_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.slack.operators.slack_webhook import SlackWebhookOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/snowflake_operator.py b/airflow/contrib/operators/snowflake_operator.py
index a81ac10..f01cc72 100644
--- a/airflow/contrib/operators/snowflake_operator.py
+++ b/airflow/contrib/operators/snowflake_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sns_publish_operator.py b/airflow/contrib/operators/sns_publish_operator.py
index b79cf46..104e240 100644
--- a/airflow/contrib/operators/sns_publish_operator.py
+++ b/airflow/contrib/operators/sns_publish_operator.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.sns import SnsPublishOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/spark_jdbc_operator.py b/airflow/contrib/operators/spark_jdbc_operator.py
index 1657fb2..fc3cdc0 100644
--- a/airflow/contrib/operators/spark_jdbc_operator.py
+++ b/airflow/contrib/operators/spark_jdbc_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.spark.operators.spark_jdbc import SparkJDBCOperator, SparkSubmitOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/spark_sql_operator.py b/airflow/contrib/operators/spark_sql_operator.py
index 7c4d3d4..19e20d2 100644
--- a/airflow/contrib/operators/spark_sql_operator.py
+++ b/airflow/contrib/operators/spark_sql_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.spark.operators.spark_sql import SparkSqlOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/spark_submit_operator.py b/airflow/contrib/operators/spark_submit_operator.py
index a0f9edd..103187e 100644
--- a/airflow/contrib/operators/spark_submit_operator.py
+++ b/airflow/contrib/operators/spark_submit_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/sqoop_operator.py b/airflow/contrib/operators/sqoop_operator.py
index 0cee705..2757847 100644
--- a/airflow/contrib/operators/sqoop_operator.py
+++ b/airflow/contrib/operators/sqoop_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.sqoop.operators.sqoop import SqoopOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/ssh_operator.py b/airflow/contrib/operators/ssh_operator.py
index 401ca43..56f94b9 100644
--- a/airflow/contrib/operators/ssh_operator.py
+++ b/airflow/contrib/operators/ssh_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.ssh.operators.ssh import SSHOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/vertica_operator.py b/airflow/contrib/operators/vertica_operator.py
index 38d13d5..e652512 100644
--- a/airflow/contrib/operators/vertica_operator.py
+++ b/airflow/contrib/operators/vertica_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.vertica.operators.vertica import VerticaOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/vertica_to_mysql.py b/airflow/contrib/operators/vertica_to_mysql.py
index 35b0df6..c85738f 100644
--- a/airflow/contrib/operators/vertica_to_mysql.py
+++ b/airflow/contrib/operators/vertica_to_mysql.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.mysql.transfers.vertica_to_mysql`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.mysql.transfers.vertica_to_mysql import VerticaToMySqlOperator
 
 warnings.warn(
diff --git a/airflow/contrib/operators/wasb_delete_blob_operator.py b/airflow/contrib/operators/wasb_delete_blob_operator.py
index d97cdc0..cbf11b3 100644
--- a/airflow/contrib/operators/wasb_delete_blob_operator.py
+++ b/airflow/contrib/operators/wasb_delete_blob_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.microsoft.azure.operators.wasb_delete_blob`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.operators.wasb_delete_blob import WasbDeleteBlobOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/operators/winrm_operator.py b/airflow/contrib/operators/winrm_operator.py
index dcdceb3..fcc6213 100644
--- a/airflow/contrib/operators/winrm_operator.py
+++ b/airflow/contrib/operators/winrm_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.winrm.operators.winrm import WinRMOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/secrets/aws_secrets_manager.py b/airflow/contrib/secrets/aws_secrets_manager.py
index cac72cd..833b03a 100644
--- a/airflow/contrib/secrets/aws_secrets_manager.py
+++ b/airflow/contrib/secrets/aws_secrets_manager.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.secrets.secrets_manager import SecretsManagerBackend  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/secrets/aws_systems_manager.py b/airflow/contrib/secrets/aws_systems_manager.py
index 3e443e1..4c7a30c 100644
--- a/airflow/contrib/secrets/aws_systems_manager.py
+++ b/airflow/contrib/secrets/aws_systems_manager.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/secrets/azure_key_vault.py b/airflow/contrib/secrets/azure_key_vault.py
index e29fcd3..f254ded 100644
--- a/airflow/contrib/secrets/azure_key_vault.py
+++ b/airflow/contrib/secrets/azure_key_vault.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.secrets.azure_key_vault import AzureKeyVaultBackend  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/secrets/gcp_secrets_manager.py b/airflow/contrib/secrets/gcp_secrets_manager.py
index 797e133..7caa7ea 100644
--- a/airflow/contrib/secrets/gcp_secrets_manager.py
+++ b/airflow/contrib/secrets/gcp_secrets_manager.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.secrets.secret_manager import CloudSecretManagerBackend
 
 warnings.warn(
diff --git a/airflow/contrib/secrets/hashicorp_vault.py b/airflow/contrib/secrets/hashicorp_vault.py
index 3093280..a3158d5 100644
--- a/airflow/contrib/secrets/hashicorp_vault.py
+++ b/airflow/contrib/secrets/hashicorp_vault.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.hashicorp.secrets.vault import VaultBackend  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/aws_athena_sensor.py b/airflow/contrib/sensors/aws_athena_sensor.py
index fbf585a..ddffc38 100644
--- a/airflow/contrib/sensors/aws_athena_sensor.py
+++ b/airflow/contrib/sensors/aws_athena_sensor.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.athena import AthenaSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py b/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py
index 8378da5..66975a8 100644
--- a/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py
+++ b/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.amazon.aws.sensors.glue_catalog_partition`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.glue_catalog_partition import AwsGlueCatalogPartitionSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/aws_redshift_cluster_sensor.py b/airflow/contrib/sensors/aws_redshift_cluster_sensor.py
index a3086e2..5c1341d 100644
--- a/airflow/contrib/sensors/aws_redshift_cluster_sensor.py
+++ b/airflow/contrib/sensors/aws_redshift_cluster_sensor.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.redshift import AwsRedshiftClusterSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/aws_sqs_sensor.py b/airflow/contrib/sensors/aws_sqs_sensor.py
index 984f227..d836521 100644
--- a/airflow/contrib/sensors/aws_sqs_sensor.py
+++ b/airflow/contrib/sensors/aws_sqs_sensor.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.sqs import SQSSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/azure_cosmos_sensor.py b/airflow/contrib/sensors/azure_cosmos_sensor.py
index 77bc26b..b7c357d 100644
--- a/airflow/contrib/sensors/azure_cosmos_sensor.py
+++ b/airflow/contrib/sensors/azure_cosmos_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.sensors.azure_cosmos import AzureCosmosDocumentSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/bash_sensor.py b/airflow/contrib/sensors/bash_sensor.py
index ac9762c..c3d9c81 100644
--- a/airflow/contrib/sensors/bash_sensor.py
+++ b/airflow/contrib/sensors/bash_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.sensors.bash import STDOUT, BashSensor, Popen, TemporaryDirectory, gettempdir  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/cassandra_record_sensor.py b/airflow/contrib/sensors/cassandra_record_sensor.py
index 908cb8e..cfc3b30 100644
--- a/airflow/contrib/sensors/cassandra_record_sensor.py
+++ b/airflow/contrib/sensors/cassandra_record_sensor.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.cassandra.sensors.record import CassandraRecordSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/cassandra_table_sensor.py b/airflow/contrib/sensors/cassandra_table_sensor.py
index ed09da2..0b7c7aa 100644
--- a/airflow/contrib/sensors/cassandra_table_sensor.py
+++ b/airflow/contrib/sensors/cassandra_table_sensor.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.cassandra.sensors.table import CassandraTableSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/celery_queue_sensor.py b/airflow/contrib/sensors/celery_queue_sensor.py
index 5914318..6ed2be1 100644
--- a/airflow/contrib/sensors/celery_queue_sensor.py
+++ b/airflow/contrib/sensors/celery_queue_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.celery.sensors.celery_queue import CeleryQueueSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/datadog_sensor.py b/airflow/contrib/sensors/datadog_sensor.py
index a1d91f0..d0377d1 100644
--- a/airflow/contrib/sensors/datadog_sensor.py
+++ b/airflow/contrib/sensors/datadog_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.datadog.sensors.datadog import DatadogSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/emr_base_sensor.py b/airflow/contrib/sensors/emr_base_sensor.py
index 7b8965f..08d0efe 100644
--- a/airflow/contrib/sensors/emr_base_sensor.py
+++ b/airflow/contrib/sensors/emr_base_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.emr_base import EmrBaseSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/emr_job_flow_sensor.py b/airflow/contrib/sensors/emr_job_flow_sensor.py
index 4c1debb..429052a 100644
--- a/airflow/contrib/sensors/emr_job_flow_sensor.py
+++ b/airflow/contrib/sensors/emr_job_flow_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.emr_job_flow import EmrJobFlowSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/emr_step_sensor.py b/airflow/contrib/sensors/emr_step_sensor.py
index 9e7e37d..9d4ac9b 100644
--- a/airflow/contrib/sensors/emr_step_sensor.py
+++ b/airflow/contrib/sensors/emr_step_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.emr_step import EmrStepSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/file_sensor.py b/airflow/contrib/sensors/file_sensor.py
index 63ca731..6d75b65 100644
--- a/airflow/contrib/sensors/file_sensor.py
+++ b/airflow/contrib/sensors/file_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.sensors.filesystem import FileSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/ftp_sensor.py b/airflow/contrib/sensors/ftp_sensor.py
index 7d85ea4..76c47c4 100644
--- a/airflow/contrib/sensors/ftp_sensor.py
+++ b/airflow/contrib/sensors/ftp_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.ftp.sensors.ftp import FTPSensor, FTPSSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/hdfs_sensor.py b/airflow/contrib/sensors/hdfs_sensor.py
index 1fa3ce6..d71ec8f 100644
--- a/airflow/contrib/sensors/hdfs_sensor.py
+++ b/airflow/contrib/sensors/hdfs_sensor.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.apache.hdfs.sensors.hdfs`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hdfs.sensors.hdfs import HdfsFolderSensor, HdfsRegexSensor
 
 warnings.warn(
@@ -32,7 +31,6 @@ warnings.warn(
 )
 
 
-# pylint: disable=too-many-ancestors
 class HdfsSensorFolder(HdfsFolderSensor):
     """This class is deprecated.
 
@@ -51,7 +49,6 @@ class HdfsSensorFolder(HdfsFolderSensor):
         super().__init__(*args, **kwargs)
 
 
-# pylint: disable=too-many-ancestors
 class HdfsSensorRegex(HdfsRegexSensor):
     """This class is deprecated.
 
diff --git a/airflow/contrib/sensors/imap_attachment_sensor.py b/airflow/contrib/sensors/imap_attachment_sensor.py
index a080a34..34d2d7f 100644
--- a/airflow/contrib/sensors/imap_attachment_sensor.py
+++ b/airflow/contrib/sensors/imap_attachment_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.imap.sensors.imap_attachment import ImapAttachmentSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/jira_sensor.py b/airflow/contrib/sensors/jira_sensor.py
index 4e61063..e7c3785 100644
--- a/airflow/contrib/sensors/jira_sensor.py
+++ b/airflow/contrib/sensors/jira_sensor.py
@@ -20,7 +20,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jira.sensors.jira import JiraSensor, JiraTicketSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/mongo_sensor.py b/airflow/contrib/sensors/mongo_sensor.py
index 9c03e36..13a5f0b 100644
--- a/airflow/contrib/sensors/mongo_sensor.py
+++ b/airflow/contrib/sensors/mongo_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.mongo.sensors.mongo import MongoSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/pubsub_sensor.py b/airflow/contrib/sensors/pubsub_sensor.py
index 08f33f8..eea404e 100644
--- a/airflow/contrib/sensors/pubsub_sensor.py
+++ b/airflow/contrib/sensors/pubsub_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/python_sensor.py b/airflow/contrib/sensors/python_sensor.py
index 3f4bc57..bc7543c 100644
--- a/airflow/contrib/sensors/python_sensor.py
+++ b/airflow/contrib/sensors/python_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.sensors.python import PythonSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/qubole_sensor.py b/airflow/contrib/sensors/qubole_sensor.py
index c17086f..6b65624 100644
--- a/airflow/contrib/sensors/qubole_sensor.py
+++ b/airflow/contrib/sensors/qubole_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.qubole.sensors.qubole import (  # noqa
     QuboleFileSensor,
     QubolePartitionSensor,
diff --git a/airflow/contrib/sensors/redis_key_sensor.py b/airflow/contrib/sensors/redis_key_sensor.py
index 61f6b55..f500c86 100644
--- a/airflow/contrib/sensors/redis_key_sensor.py
+++ b/airflow/contrib/sensors/redis_key_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.redis.sensors.redis_key import RedisKeySensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/redis_pub_sub_sensor.py b/airflow/contrib/sensors/redis_pub_sub_sensor.py
index b708061..16946ac 100644
--- a/airflow/contrib/sensors/redis_pub_sub_sensor.py
+++ b/airflow/contrib/sensors/redis_pub_sub_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.redis.sensors.redis_pub_sub import RedisPubSubSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/sagemaker_base_sensor.py b/airflow/contrib/sensors/sagemaker_base_sensor.py
index 1791a38..86e3233 100644
--- a/airflow/contrib/sensors/sagemaker_base_sensor.py
+++ b/airflow/contrib/sensors/sagemaker_base_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.sagemaker_base import SageMakerBaseSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/sagemaker_endpoint_sensor.py b/airflow/contrib/sensors/sagemaker_endpoint_sensor.py
index 7593e74..5107d6f 100644
--- a/airflow/contrib/sensors/sagemaker_endpoint_sensor.py
+++ b/airflow/contrib/sensors/sagemaker_endpoint_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.sagemaker_endpoint import SageMakerEndpointSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/sagemaker_training_sensor.py b/airflow/contrib/sensors/sagemaker_training_sensor.py
index d933b5c..e8a7650 100644
--- a/airflow/contrib/sensors/sagemaker_training_sensor.py
+++ b/airflow/contrib/sensors/sagemaker_training_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.sagemaker_training import (  # noqa
     SageMakerHook,
     SageMakerTrainingSensor,
diff --git a/airflow/contrib/sensors/sagemaker_transform_sensor.py b/airflow/contrib/sensors/sagemaker_transform_sensor.py
index 98444ea..29fd18f 100644
--- a/airflow/contrib/sensors/sagemaker_transform_sensor.py
+++ b/airflow/contrib/sensors/sagemaker_transform_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.sagemaker_transform import SageMakerTransformSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/sagemaker_tuning_sensor.py b/airflow/contrib/sensors/sagemaker_tuning_sensor.py
index 715936b..7079e4c 100644
--- a/airflow/contrib/sensors/sagemaker_tuning_sensor.py
+++ b/airflow/contrib/sensors/sagemaker_tuning_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.sensors.sagemaker_tuning import SageMakerTuningSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/sftp_sensor.py b/airflow/contrib/sensors/sftp_sensor.py
index 6d630a3..d2700e8 100644
--- a/airflow/contrib/sensors/sftp_sensor.py
+++ b/airflow/contrib/sensors/sftp_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.sftp.sensors.sftp import SFTPSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/wasb_sensor.py b/airflow/contrib/sensors/wasb_sensor.py
index 5fef5bb..d8e0748 100644
--- a/airflow/contrib/sensors/wasb_sensor.py
+++ b/airflow/contrib/sensors/wasb_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/sensors/weekday_sensor.py b/airflow/contrib/sensors/weekday_sensor.py
index 4d5342d..1f836e1 100644
--- a/airflow/contrib/sensors/weekday_sensor.py
+++ b/airflow/contrib/sensors/weekday_sensor.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.sensors.weekday import DayOfWeekSensor  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/task_runner/cgroup_task_runner.py b/airflow/contrib/task_runner/cgroup_task_runner.py
index 3a996f8..f923126 100644
--- a/airflow/contrib/task_runner/cgroup_task_runner.py
+++ b/airflow/contrib/task_runner/cgroup_task_runner.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.task.task_runner.cgroup_task_runner import CgroupTaskRunner  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/utils/gcp_field_sanitizer.py b/airflow/contrib/utils/gcp_field_sanitizer.py
index a31b6d3..37c0aff 100644
--- a/airflow/contrib/utils/gcp_field_sanitizer.py
+++ b/airflow/contrib/utils/gcp_field_sanitizer.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.utils.field_sanitizer import (  # noqa
     GcpBodyFieldSanitizer,
     GcpFieldSanitizerException,
diff --git a/airflow/contrib/utils/gcp_field_validator.py b/airflow/contrib/utils/gcp_field_validator.py
index 6ac5b4d..fc42dca 100644
--- a/airflow/contrib/utils/gcp_field_validator.py
+++ b/airflow/contrib/utils/gcp_field_validator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.utils.field_validator import (  # noqa
     GcpBodyFieldValidator,
     GcpFieldValidationException,
diff --git a/airflow/contrib/utils/log/task_handler_with_custom_formatter.py b/airflow/contrib/utils/log/task_handler_with_custom_formatter.py
index 6741b92..9bbdee3 100644
--- a/airflow/contrib/utils/log/task_handler_with_custom_formatter.py
+++ b/airflow/contrib/utils/log/task_handler_with_custom_formatter.py
@@ -18,7 +18,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.utils.log.task_handler_with_custom_formatter import TaskHandlerWithCustomFormatter  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/utils/mlengine_operator_utils.py b/airflow/contrib/utils/mlengine_operator_utils.py
index 46373ff..ebd630c 100644
--- a/airflow/contrib/utils/mlengine_operator_utils.py
+++ b/airflow/contrib/utils/mlengine_operator_utils.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.google.cloud.utils.mlengine_operator_utils`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.utils.mlengine_operator_utils import create_evaluate_ops  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/utils/mlengine_prediction_summary.py b/airflow/contrib/utils/mlengine_prediction_summary.py
index 2edc130..ea39052 100644
--- a/airflow/contrib/utils/mlengine_prediction_summary.py
+++ b/airflow/contrib/utils/mlengine_prediction_summary.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.google.cloud.utils.mlengine_prediction_summar
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.google.cloud.utils.mlengine_prediction_summary import JsonCoder, MakeSummary  # noqa
 
 warnings.warn(
diff --git a/airflow/contrib/utils/weekday.py b/airflow/contrib/utils/weekday.py
index ff3dc22..2f2448c 100644
--- a/airflow/contrib/utils/weekday.py
+++ b/airflow/contrib/utils/weekday.py
@@ -17,7 +17,6 @@
 """This module is deprecated. Please use :mod:`airflow.utils.weekday`."""
 import warnings
 
-# pylint: disable=unused-import
 from airflow.utils.weekday import WeekDay  # noqa
 
 warnings.warn(
diff --git a/airflow/decorators/__init__.py b/airflow/decorators/__init__.py
index d1386b0..1250f32 100644
--- a/airflow/decorators/__init__.py
+++ b/airflow/decorators/__init__.py
@@ -19,8 +19,8 @@ from typing import Callable, Dict, Iterable, List, Optional, Union
 
 from airflow.decorators.python import python_task
 from airflow.decorators.python_virtualenv import _virtualenv_task
-from airflow.decorators.task_group import task_group  # noqa # pylint: disable=unused-import
-from airflow.models.dag import dag  # noqa # pylint: disable=unused-import
+from airflow.decorators.task_group import task_group  # noqa
+from airflow.models.dag import dag  # noqa
 
 
 class _TaskDecorator:
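In airflow/decorators/__init__.py the `# noqa` stays for the same reason as in the contrib shims: `task_group` and `dag` are re-exported purely so users can import everything from one place. A short usage sketch of those re-exports (assumes a stock Airflow 2.1 install, where `DummyOperator` lives in `airflow.operators.dummy`):

    from datetime import datetime

    from airflow.decorators import dag, task_group
    from airflow.operators.dummy import DummyOperator

    @dag(schedule_interval=None, start_date=datetime(2021, 1, 1), catchup=False)
    def demo():
        @task_group()
        def etl():
            DummyOperator(task_id="extract") >> DummyOperator(task_id="load")

        etl()

    demo_dag = demo()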
diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py
index 2f17980..b133964 100644
--- a/airflow/decorators/base.py
+++ b/airflow/decorators/base.py
@@ -175,7 +175,7 @@ class DecoratedOperator(BaseOperator):
         return args, kwargs
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def task_decorator_factory(
diff --git a/airflow/decorators/python.py b/airflow/decorators/python.py
index f089995..a7251cb 100644
--- a/airflow/decorators/python.py
+++ b/airflow/decorators/python.py
@@ -59,7 +59,7 @@ class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
         super().__init__(kwargs_to_upstream=kwargs_to_upstream, **kwargs)
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def python_task(
diff --git a/airflow/decorators/python_virtualenv.py b/airflow/decorators/python_virtualenv.py
index 386c267..61e5d1f 100644
--- a/airflow/decorators/python_virtualenv.py
+++ b/airflow/decorators/python_virtualenv.py
@@ -67,7 +67,7 @@ class _PythonVirtualenvDecoratedOperator(DecoratedOperator, PythonVirtualenvOper
         return res
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def _virtualenv_task(
diff --git a/airflow/decorators/task_group.py b/airflow/decorators/task_group.py
index 89283b2..cdd9883 100644
--- a/airflow/decorators/task_group.py
+++ b/airflow/decorators/task_group.py
@@ -25,7 +25,7 @@ from typing import Callable, Optional, TypeVar, cast
 
 from airflow.utils.task_group import TaskGroup
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 task_group_sig = signature(TaskGroup.__init__)
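The repeated `T = TypeVar("T", bound=Callable)` hunks lose their invalid-name pragma because the single-letter module constant was purely a pylint naming complaint. The bound TypeVar itself is what lets each decorator factory hand back the wrapped callable with its original type, roughly:

    from typing import Callable, TypeVar, cast

    T = TypeVar("T", bound=Callable)

    def passthrough_decorator(func: T) -> T:
        # Declaring parameter and return as the same TypeVar preserves the
        # decorated function's signature for type checkers.
        return cast(T, func)

    @passthrough_decorator
    def add(x: int, y: int) -> int:
        return x + y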
 
diff --git a/airflow/example_dags/example_branch_labels.py b/airflow/example_dags/example_branch_labels.py
index 4f1cb25..926e35d 100644
--- a/airflow/example_dags/example_branch_labels.py
+++ b/airflow/example_dags/example_branch_labels.py
@@ -36,5 +36,5 @@ with DAG("example_branch_labels", schedule_interval="@daily", start_date=days_ag
     report = DummyOperator(task_id="report")
 
     ingest >> analyse >> check
-    check >> Label("No errors") >> save >> report  # pylint: disable=expression-not-assigned
-    check >> Label("Errors found") >> describe >> error >> report  # pylint: disable=expression-not-assigned
+    check >> Label("No errors") >> save >> report
+    check >> Label("Errors found") >> describe >> error >> report
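The example-DAG hunks drop `expression-not-assigned` pragmas: a `>>` chain is an expression statement whose value is discarded, which pylint flagged even though registering the dependency as a side effect is exactly how Airflow DAGs are declared. A minimal, self-contained sketch of the labeled-edge idiom (assumes Airflow 2.1, where `Label` lives in `airflow.utils.edgemodifier`):

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.dummy import DummyOperator
    from airflow.utils.edgemodifier import Label

    with DAG("labels_demo", start_date=datetime(2021, 1, 1), schedule_interval=None):
        check = DummyOperator(task_id="check")
        save = DummyOperator(task_id="save")
        # A bare expression: its side effect is registering the dependency,
        # so there is genuinely nothing to assign.
        check >> Label("No errors") >> save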
diff --git a/airflow/example_dags/example_branch_operator.py b/airflow/example_dags/example_branch_operator.py
index 6c1fb8f..7928490 100644
--- a/airflow/example_dags/example_branch_operator.py
+++ b/airflow/example_dags/example_branch_operator.py
@@ -65,4 +65,4 @@ with DAG(
         )
 
         # Label is optional here, but it can help identify more complex branches
-        branching >> Label(option) >> t >> dummy_follow >> join  # pylint: disable=expression-not-assigned
+        branching >> Label(option) >> t >> dummy_follow >> join
diff --git a/airflow/example_dags/libs/helper.py b/airflow/example_dags/libs/helper.py
index f80194e..a3d3a72 100644
--- a/airflow/example_dags/libs/helper.py
+++ b/airflow/example_dags/libs/helper.py
@@ -17,6 +17,5 @@
 # under the License.
 
 
-# pylint: disable=missing-docstring
-def print_stuff():  # noqa: D103
+def print_stuff():
     print("annotated!")
diff --git a/airflow/example_dags/tutorial_etl_dag.py b/airflow/example_dags/tutorial_etl_dag.py
index 8b45600..a23c836 100644
--- a/airflow/example_dags/tutorial_etl_dag.py
+++ b/airflow/example_dags/tutorial_etl_dag.py
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# pylint: disable=missing-function-docstring
 
 """
 ### ETL DAG Tutorial Documentation
diff --git a/airflow/example_dags/tutorial_taskflow_api_etl.py b/airflow/example_dags/tutorial_taskflow_api_etl.py
index 52745f3..edc589c 100644
--- a/airflow/example_dags/tutorial_taskflow_api_etl.py
+++ b/airflow/example_dags/tutorial_taskflow_api_etl.py
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# pylint: disable=missing-function-docstring
 
 # [START tutorial]
 # [START import_module]
diff --git a/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py b/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py
index 6d00cf6..d2723be 100644
--- a/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py
+++ b/airflow/example_dags/tutorial_taskflow_api_etl_virtualenv.py
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# pylint: disable=missing-function-docstring
 
 # [START tutorial]
 # [START import_module]
diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index 4c976cf..b2c5016 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -39,7 +39,7 @@ from celery.backends.base import BaseKeyValueStoreBackend
 from celery.backends.database import DatabaseBackend, Task as TaskDb, session_cleanup
 from celery.result import AsyncResult
 from celery.signals import import_modules as celery_import_modules
-from setproctitle import setproctitle  # pylint: disable=no-name-in-module
+from setproctitle import setproctitle
 
 import airflow.settings as settings
 from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
@@ -116,21 +116,21 @@ def _execute_in_fork(command_to_exec: CommandType) -> None:
 
         args.func(args)
         ret = 0
-    except Exception as e:  # pylint: disable=broad-except
+    except Exception as e:
         log.exception("Failed to execute task %s.", str(e))
         ret = 1
     finally:
         Sentry.flush()
         logging.shutdown()
-        os._exit(ret)  # pylint: disable=protected-access
+        os._exit(ret)
 
 
 def _execute_in_subprocess(command_to_exec: CommandType) -> None:
     env = os.environ.copy()
     try:
-        # pylint: disable=unexpected-keyword-arg
+
         subprocess.check_output(command_to_exec, stderr=subprocess.STDOUT, close_fds=True, env=env)
-        # pylint: disable=unexpected-keyword-arg
+
     except subprocess.CalledProcessError as e:
         log.exception('execute_command encountered a CalledProcessError')
         log.error(e.output)
@@ -166,14 +166,13 @@ def send_task_to_executor(
     try:
         with timeout(seconds=OPERATION_TIMEOUT):
             result = task_to_run.apply_async(args=[command], queue=queue)
-    except Exception as e:  # pylint: disable=broad-except
+    except Exception as e:
         exception_traceback = f"Celery Task ID: {key}\n{traceback.format_exc()}"
         result = ExceptionWithTraceback(e, exception_traceback)
 
     return key, command, result
 
 
-# pylint: disable=unused-import
 @celery_import_modules.connect
 def on_celery_import_modules(*args, **kwargs):
     """
@@ -198,9 +197,6 @@ def on_celery_import_modules(*args, **kwargs):
         pass
 
 
-# pylint: enable=unused-import
-
-
 class CeleryExecutor(BaseExecutor):
     """
     CeleryExecutor is recommended for production use of Airflow. It allows
@@ -292,9 +288,7 @@ class CeleryExecutor(BaseExecutor):
             self.queued_tasks.pop(key)
             self.task_publish_retries.pop(key)
             if isinstance(result, ExceptionWithTraceback):
-                self.log.error(  # pylint: disable=logging-not-lazy
-                    CELERY_SEND_ERR_MSG_HEADER + ": %s\n%s\n", result.exception, result.traceback
-                )
+                self.log.error(CELERY_SEND_ERR_MSG_HEADER + ": %s\n%s\n", result.exception, result.traceback)
                 self.event_buffer[key] = (State.FAILED, None)
             elif result is not None:
                 result.backend = cached_celery_backend
@@ -413,7 +407,7 @@ class CeleryExecutor(BaseExecutor):
                 pass
             else:
                 self.log.info("Unexpected state for %s: %s", key, state)
-        except Exception:  # noqa pylint: disable=broad-except
+        except Exception:
             self.log.exception("Error syncing the Celery executor, ignoring it.")
 
     def end(self, synchronous: bool = False) -> None:
@@ -511,7 +505,7 @@ def fetch_celery_task_state(async_result: AsyncResult) -> Tuple[str, Union[str,
             # to get the current state of the task
             info = async_result.info if hasattr(async_result, 'info') else None
             return async_result.task_id, async_result.state, info
-    except Exception as e:  # pylint: disable=broad-except
+    except Exception as e:
         exception_traceback = f"Celery Task ID: {async_result}\n{traceback.format_exc()}"
         return async_result.task_id, ExceptionWithTraceback(e, exception_traceback), None
 
@@ -592,7 +586,7 @@ class BulkStateFetcher(LoggingMixin):
             states_and_info_by_task_id: MutableMapping[str, EventBufferValueType] = {}
             for task_id, state_or_exception, info in task_id_to_states_and_info:
                 if isinstance(state_or_exception, ExceptionWithTraceback):
-                    self.log.error(  # pylint: disable=logging-not-lazy
+                    self.log.error(
                         CELERY_FETCH_ERR_MSG_HEADER + ":%s\n%s\n",
                         state_or_exception.exception,
                         state_or_exception.traceback,
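Most of the celery_executor pragmas covered two deliberate choices: broad `except Exception` blocks (an executor must record any task failure rather than crash its event loop) and `os._exit()` in the forked child (to skip atexit handlers inherited from the parent). A simplified sketch of the fork-and-exit shape of `_execute_in_fork` (POSIX-only; the real method also resets signal handlers, parses the CLI command, and flushes Sentry):

    import logging
    import os

    log = logging.getLogger(__name__)

    def execute_in_fork(fn) -> None:
        pid = os.fork()
        if pid:
            # Parent: wait for the child and surface a non-zero exit status.
            _, status = os.waitpid(pid, 0)
            if status != 0:
                raise RuntimeError("forked task failed")
            return
        ret = 1
        try:
            fn()
            ret = 0
        except Exception as e:  # broad on purpose: any failure must be logged
            log.exception("Failed to execute task %s.", str(e))
        finally:
            logging.shutdown()
            os._exit(ret)  # bypass atexit handlers copied from the parent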
diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py
index 3bf784c..d9f2313 100644
--- a/airflow/executors/debug_executor.py
+++ b/airflow/executors/debug_executor.py
@@ -49,7 +49,7 @@ class DebugExecutor(BaseExecutor):
         self.tasks_params: Dict[TaskInstanceKey, Dict[str, Any]] = {}
         self.fail_fast = conf.getboolean("debug", "fail_fast")
 
-    def execute_async(self, *args, **kwargs) -> None:  # pylint: disable=signature-differs
+    def execute_async(self, *args, **kwargs) -> None:
         """The method is replaced by custom trigger_task implementation."""
 
     def sync(self) -> None:
@@ -66,7 +66,7 @@ class DebugExecutor(BaseExecutor):
                 self.log.info("Executor is terminated! Stopping %s to %s", ti.key, State.FAILED)
                 ti.set_state(State.FAILED)
                 self.change_state(ti.key, State.FAILED)
-                ti._run_finished_callback()  # pylint: disable=protected-access
+                ti._run_finished_callback()
                 continue
 
             task_succeeded = self._run_task(ti)
@@ -76,14 +76,14 @@ class DebugExecutor(BaseExecutor):
         key = ti.key
         try:
             params = self.tasks_params.pop(ti.key, {})
-            ti._run_raw_task(job_id=ti.job_id, **params)  # pylint: disable=protected-access
+            ti._run_raw_task(job_id=ti.job_id, **params)
             self.change_state(key, State.SUCCESS)
-            ti._run_finished_callback()  # pylint: disable=protected-access
+            ti._run_finished_callback()
             return True
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             ti.set_state(State.FAILED)
             self.change_state(key, State.FAILED)
-            ti._run_finished_callback()  # pylint: disable=protected-access
+            ti._run_finished_callback()
             self.log.exception("Failed to execute task: %s.", str(e))
             return False
 
diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py
index 5c42acd..9165687 100644
--- a/airflow/executors/kubernetes_executor.py
+++ b/airflow/executors/kubernetes_executor.py
@@ -27,7 +27,7 @@ import json
 import multiprocessing
 import time
 from datetime import timedelta
-from queue import Empty, Queue  # pylint: disable=unused-import
+from queue import Empty, Queue
 from typing import Any, Dict, List, Optional, Tuple
 
 from kubernetes import client, watch
@@ -451,7 +451,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
         self.log.info('When executor started up, found %s queued task instances', len(queued_tasks))
 
         for task in queued_tasks:
-            # pylint: disable=protected-access
+
             self.log.debug("Checking task %s", task)
             dict_string = "dag_id={},task_id={},execution_date={},airflow-worker={}".format(
                 pod_generator.make_safe_label_value(task.dag_id),
@@ -459,7 +459,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
                 pod_generator.datetime_to_label_safe_datestring(task.execution_date),
                 pod_generator.make_safe_label_value(str(self.scheduler_job_id)),
             )
-            # pylint: enable=protected-access
+
             kwargs = dict(label_selector=dict_string)
             if self.kube_config.kube_client_request_args:
                 for key, value in self.kube_config.kube_client_request_args.items():
@@ -504,7 +504,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
         self.log.info('Add task %s with command %s with executor_config %s', key, command, executor_config)
         try:
             kube_executor_config = PodGenerator.from_obj(executor_config)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self.log.error("Invalid executor_config for %s", key)
             self.fail(key=key, info="Invalid executor_config passed")
             return
@@ -537,7 +537,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
         self.kube_scheduler.sync()
 
         last_resource_version = None
-        while True:  # pylint: disable=too-many-nested-blocks
+        while True:
             try:
                 results = self.result_queue.get_nowait()
                 try:
@@ -546,7 +546,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
                     self.log.info('Changing state of %s to %s', results, state)
                     try:
                         self._change_state(key, state, pod_id, namespace)
-                    except Exception as e:  # pylint: disable=broad-except
+                    except Exception as e:
                         self.log.exception(
                             "Exception: %s when attempting to change state of %s to %s, re-queueing.",
                             e,
@@ -562,7 +562,6 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
         resource_instance = ResourceVersion()
         resource_instance.resource_version = last_resource_version or resource_instance.resource_version
 
-        # pylint: disable=too-many-nested-blocks
         for _ in range(self.kube_config.worker_pods_creation_batch_size):
             try:
                 task = self.task_queue.get_nowait()
@@ -583,7 +582,6 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
                     self.task_queue.task_done()
             except Empty:
                 break
-        # pylint: enable=too-many-nested-blocks
 
         # Run any pending timed events
         next_event = self.event_scheduler.run(blocking=False)
@@ -725,7 +723,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
         if not self.result_queue:
             raise AirflowException(NOT_STARTED_MESSAGE)
         self.log.debug('Executor shutting down, result_queue approximate size=%d', self.result_queue.qsize())
-        while True:  # pylint: disable=too-many-nested-blocks
+        while True:
             try:
                 results = self.result_queue.get_nowait()
                 self.log.warning('Executor shutting down, flushing results=%s', results)
@@ -736,7 +734,7 @@ class KubernetesExecutor(BaseExecutor, LoggingMixin):
                     )
                     try:
                         self._change_state(key, state, pod_id, namespace)
-                    except Exception as e:  # pylint: disable=broad-except
+                    except Exception as e:
                         self.log.exception(
                             'Ignoring exception: %s when attempting to change state of %s to %s.',
                             e,
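
The sync() and end() loops above drain a queue until it reports Empty; that
nesting is what the removed too-many-nested-blocks markers used to cover. A
self-contained sketch of the drain pattern, assuming a plain queue.Queue
rather than the executor's multiprocessing-backed queues:

    from queue import Empty, Queue

    def drain(result_queue: Queue) -> list:
        drained = []
        while True:
            try:
                item = result_queue.get_nowait()
            except Empty:
                break
            try:
                drained.append(item)  # stand-in for _change_state(...)
            finally:
                result_queue.task_done()
        return drained

    q = Queue()
    for n in range(3):
        q.put(n)
    assert drain(q) == [0, 1, 2]
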
diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py
index a29342f..ab9356f 100644
--- a/airflow/executors/local_executor.py
+++ b/airflow/executors/local_executor.py
@@ -28,18 +28,15 @@ import subprocess
 from abc import abstractmethod
 from multiprocessing import Manager, Process
 from multiprocessing.managers import SyncManager
-from queue import Empty, Queue  # pylint: disable=unused-import  # noqa: F401
-from typing import Any, List, Optional, Tuple, Union  # pylint: disable=unused-import # noqa: F401
+from queue import Empty, Queue
+from typing import Any, List, Optional, Tuple, Union
 
-from setproctitle import setproctitle  # pylint: disable=no-name-in-module
+from setproctitle import setproctitle
 
 from airflow import settings
 from airflow.exceptions import AirflowException
 from airflow.executors.base_executor import NOT_STARTED_MESSAGE, PARALLELISM, BaseExecutor, CommandType
-from airflow.models.taskinstance import (  # pylint: disable=unused-import # noqa: F401
-    TaskInstanceKey,
-    TaskInstanceStateType,
-)
+from airflow.models.taskinstance import TaskInstanceKey, TaskInstanceStateType
 from airflow.utils.log.logging_mixin import LoggingMixin
 from airflow.utils.state import State
 
@@ -123,12 +120,12 @@ class LocalWorkerBase(Process, LoggingMixin):
             args.func(args)
             ret = 0
             return State.SUCCESS
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             self.log.error("Failed to execute task %s.", str(e))
         finally:
             Sentry.flush()
             logging.shutdown()
-            os._exit(ret)  # pylint: disable=protected-access
+            os._exit(ret)
             raise RuntimeError('unreachable -- keep mypy happy')
 
     @abstractmethod
@@ -227,7 +224,6 @@ class LocalExecutor(BaseExecutor):
             self.executor.workers_used = 0
             self.executor.workers_active = 0
 
-        # pylint: disable=unused-argument # pragma: no cover
         def execute_async(
             self,
             key: TaskInstanceKey,
@@ -250,7 +246,6 @@ class LocalExecutor(BaseExecutor):
             self.executor.workers_active += 1
             local_worker.start()
 
-        # pylint: enable=unused-argument # pragma: no cover
         def sync(self) -> None:
             """Sync will get called periodically by the heartbeat method."""
             if not self.executor.result_queue:
@@ -302,8 +297,8 @@ class LocalExecutor(BaseExecutor):
             self,
             key: TaskInstanceKey,
             command: CommandType,
-            queue: Optional[str] = None,  # pylint: disable=unused-argument
-            executor_config: Optional[Any] = None,  # pylint: disable=unused-argument
+            queue: Optional[str] = None,
+            executor_config: Optional[Any] = None,
         ) -> None:
             """
             Executes task asynchronously.
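
The os._exit(ret) kept above is the conventional exit for a forked worker:
unlike sys.exit() it terminates immediately, skipping atexit hooks and any
cleanup inherited from the parent, which is why the real code follows it with
an unreachable raise purely to satisfy mypy. A minimal sketch of that shape,
with do_work standing in for the task command:

    import os

    def do_work() -> None:
        """Stand-in for the task command."""

    def child_main() -> None:
        ret = 1
        try:
            do_work()
            ret = 0
        finally:
            os._exit(ret)  # hard exit: nothing after this line ever runs
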
diff --git a/airflow/hooks/S3_hook.py b/airflow/hooks/S3_hook.py
index cc0fd67..b59311a 100644
--- a/airflow/hooks/S3_hook.py
+++ b/airflow/hooks/S3_hook.py
@@ -16,12 +16,11 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# pylint: disable=invalid-name
+
 """This module is deprecated. Please use :mod:`airflow.providers.amazon.aws.hooks.s3`."""
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook, provide_bucket_name  # noqa
 
 warnings.warn(
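
S3_hook.py and the long run of hook modules below all reduce to the same shim
once the pylint markers are gone: re-export the relocated class and warn on
import. Sketched here with the S3 target shown in the hunk above; stacklevel=2
makes the warning point at the importing module rather than at the shim:

    import warnings

    from airflow.providers.amazon.aws.hooks.s3 import S3Hook  # noqa

    warnings.warn(
        "This module is deprecated. Please use `airflow.providers.amazon.aws.hooks.s3`.",
        DeprecationWarning,
        stacklevel=2,
    )
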
diff --git a/airflow/hooks/base_hook.py b/airflow/hooks/base_hook.py
index 5c94e5f..cf1594d 100644
--- a/airflow/hooks/base_hook.py
+++ b/airflow/hooks/base_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.hooks.base import BaseHook  # noqa
 
 warnings.warn("This module is deprecated. Please use `airflow.hooks.base`.", DeprecationWarning, stacklevel=2)
diff --git a/airflow/hooks/dbapi.py b/airflow/hooks/dbapi.py
index 6c00320..553dacc 100644
--- a/airflow/hooks/dbapi.py
+++ b/airflow/hooks/dbapi.py
@@ -312,7 +312,7 @@ class DbApiHook(BaseHook):
         self.log.info("Done loading. Loaded a total of %s rows", i)
 
     @staticmethod
-    def _serialize_cell(cell, conn=None):  # pylint: disable=unused-argument
+    def _serialize_cell(cell, conn=None):
         """
         Returns the SQL literal of the cell as a string.
 
diff --git a/airflow/hooks/dbapi_hook.py b/airflow/hooks/dbapi_hook.py
index a3aafa9..4a441b0 100644
--- a/airflow/hooks/dbapi_hook.py
+++ b/airflow/hooks/dbapi_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.hooks.dbapi import DbApiHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/docker_hook.py b/airflow/hooks/docker_hook.py
index 80e6319..aaedd7e 100644
--- a/airflow/hooks/docker_hook.py
+++ b/airflow/hooks/docker_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.docker.hooks.docker import DockerHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/druid_hook.py b/airflow/hooks/druid_hook.py
index 9fe43e6..0a43deb 100644
--- a/airflow/hooks/druid_hook.py
+++ b/airflow/hooks/druid_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook, DruidHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/hdfs_hook.py b/airflow/hooks/hdfs_hook.py
index 5476d54..fd13e73 100644
--- a/airflow/hooks/hdfs_hook.py
+++ b/airflow/hooks/hdfs_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hdfs.hooks.hdfs import HDFSHook, HDFSHookException  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py
index f559d32..74d7863 100644
--- a/airflow/hooks/hive_hooks.py
+++ b/airflow/hooks/hive_hooks.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hive.hooks.hive import (  # noqa
     HIVE_QUEUE_PRIORITIES,
     HiveCliHook,
diff --git a/airflow/hooks/http_hook.py b/airflow/hooks/http_hook.py
index c352074..5b8c1fd 100644
--- a/airflow/hooks/http_hook.py
+++ b/airflow/hooks/http_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.http.hooks.http import HttpHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/jdbc_hook.py b/airflow/hooks/jdbc_hook.py
index cf1dd6f..a032ab0 100644
--- a/airflow/hooks/jdbc_hook.py
+++ b/airflow/hooks/jdbc_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jdbc.hooks.jdbc import JdbcHook, jaydebeapi  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/mssql_hook.py b/airflow/hooks/mssql_hook.py
index c0077cb..64943ee 100644
--- a/airflow/hooks/mssql_hook.py
+++ b/airflow/hooks/mssql_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/mysql_hook.py b/airflow/hooks/mysql_hook.py
index 2fca04e..4373136 100644
--- a/airflow/hooks/mysql_hook.py
+++ b/airflow/hooks/mysql_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.mysql.hooks.mysql import MySqlHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/oracle_hook.py b/airflow/hooks/oracle_hook.py
index bc2bab3..0dfe33a 100644
--- a/airflow/hooks/oracle_hook.py
+++ b/airflow/hooks/oracle_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.oracle.hooks.oracle import OracleHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/pig_hook.py b/airflow/hooks/pig_hook.py
index 4c50258..3ead3df 100644
--- a/airflow/hooks/pig_hook.py
+++ b/airflow/hooks/pig_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.pig.hooks.pig import PigCliHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/postgres_hook.py b/airflow/hooks/postgres_hook.py
index 2c6b9d1..16f79dc 100644
--- a/airflow/hooks/postgres_hook.py
+++ b/airflow/hooks/postgres_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.postgres.hooks.postgres import PostgresHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/presto_hook.py b/airflow/hooks/presto_hook.py
index c277ec4..0c33e14 100644
--- a/airflow/hooks/presto_hook.py
+++ b/airflow/hooks/presto_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.presto.hooks.presto import PrestoHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/samba_hook.py b/airflow/hooks/samba_hook.py
index 6411880..b4c7cf8 100644
--- a/airflow/hooks/samba_hook.py
+++ b/airflow/hooks/samba_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.samba.hooks.samba import SambaHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/slack_hook.py b/airflow/hooks/slack_hook.py
index 798aafc..43636b2 100644
--- a/airflow/hooks/slack_hook.py
+++ b/airflow/hooks/slack_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.slack.hooks.slack import SlackHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/sqlite_hook.py b/airflow/hooks/sqlite_hook.py
index faac450..7739004 100644
--- a/airflow/hooks/sqlite_hook.py
+++ b/airflow/hooks/sqlite_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.sqlite.hooks.sqlite import SqliteHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/subprocess.py b/airflow/hooks/subprocess.py
index 409dbb6..1c6aec4 100644
--- a/airflow/hooks/subprocess.py
+++ b/airflow/hooks/subprocess.py
@@ -62,8 +62,7 @@ class SubprocessHook(BaseHook):
 
             self.log.info('Running command: %s', command)
 
-            # pylint: disable=consider-using-with
-            self.sub_process = Popen(  # pylint: disable=subprocess-popen-preexec-fn
+            self.sub_process = Popen(
                 command,
                 stdout=PIPE,
                 stderr=STDOUT,
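
The removed subprocess-popen-preexec-fn marker above flags a real caveat:
preexec_fn runs between fork and exec and is unsafe in threaded parents. A
hedged, POSIX-only sketch of the thread-safe spelling of the same idea, with
an illustrative command:

    import os
    import signal
    from subprocess import PIPE, STDOUT, Popen

    proc = Popen(
        ["bash", "-c", "sleep 30"],
        stdout=PIPE,
        stderr=STDOUT,
        start_new_session=True,  # same effect as preexec_fn=os.setsid, without the thread hazard
    )
    os.killpg(os.getpgid(proc.pid), signal.SIGTERM)  # signal the whole process group
    proc.wait()
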
diff --git a/airflow/hooks/webhdfs_hook.py b/airflow/hooks/webhdfs_hook.py
index 77aae5d..1c43538 100644
--- a/airflow/hooks/webhdfs_hook.py
+++ b/airflow/hooks/webhdfs_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook  # noqa
 
 warnings.warn(
diff --git a/airflow/hooks/zendesk_hook.py b/airflow/hooks/zendesk_hook.py
index 462c6e9..ab8366e 100644
--- a/airflow/hooks/zendesk_hook.py
+++ b/airflow/hooks/zendesk_hook.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.zendesk.hooks.zendesk import Zendesk, ZendeskError, ZendeskHook  # noqa
 
 warnings.warn(
diff --git a/airflow/jobs/__init__.py b/airflow/jobs/__init__.py
index 6bcff68..bbdabed 100644
--- a/airflow/jobs/__init__.py
+++ b/airflow/jobs/__init__.py
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-import airflow.jobs.backfill_job  # noqa
-import airflow.jobs.base_job  # noqa
-import airflow.jobs.local_task_job  # noqa
+import airflow.jobs.backfill_job
+import airflow.jobs.base_job
+import airflow.jobs.local_task_job
 import airflow.jobs.scheduler_job  # noqa
diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py
index b306bc4..adab943 100644
--- a/airflow/jobs/backfill_job.py
+++ b/airflow/jobs/backfill_job.py
@@ -95,7 +95,7 @@ class BackfillJob(BaseJob):
         """
 
         # TODO(edgarRd): AIRFLOW-1444: Add consistency check on counts
-        def __init__(  # pylint: disable=too-many-arguments
+        def __init__(
             self,
             to_run=None,
             running=None,
@@ -121,7 +121,7 @@ class BackfillJob(BaseJob):
             self.finished_runs = finished_runs
             self.total_runs = total_runs
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         dag,
         start_date=None,
@@ -391,7 +391,7 @@ class BackfillJob(BaseJob):
         self.log.debug("Finished dag run loop iteration. Remaining tasks %s", ti_status.to_run.values())
 
     @provide_session
-    def _process_backfill_task_instances(  # pylint: disable=too-many-statements
+    def _process_backfill_task_instances(
         self,
         ti_status,
         executor,
@@ -428,7 +428,7 @@ class BackfillJob(BaseJob):
             # determined deadlocked while they are actually
             # waiting for their upstream to finish
             @provide_session
-            def _per_task_process(key, ti, session=None):  # pylint: disable=too-many-return-statements
+            def _per_task_process(key, ti, session=None):
                 ti.refresh_from_db(lock_for_update=True, session=session)
 
                 task = self.dag.get_task(ti.task_id, include_subdags=True)
@@ -554,7 +554,7 @@ class BackfillJob(BaseJob):
                 self.log.debug('Adding %s to not_ready', ti)
                 ti_status.not_ready.add(key)
 
-            try:  # pylint: disable=too-many-nested-blocks
+            try:
                 for task in self.dag.topological_sort(include_subdag_tasks=True):
                     for key, ti in list(ti_status.to_run.items()):
                         if task.task_id != ti.task_id:
@@ -790,7 +790,7 @@ class BackfillJob(BaseJob):
 
         ti_status.total_runs = len(run_dates)  # total dag runs in backfill
 
-        try:  # pylint: disable=too-many-nested-blocks
+        try:
             remaining_dates = ti_status.total_runs
             while remaining_dates > 0:
                 dates_to_process = [
@@ -860,7 +860,6 @@ class BackfillJob(BaseJob):
                     ),
                 )
                 .filter(
-                    # pylint: disable=comparison-with-callable
                     DagRun.state == State.RUNNING,
                     DagRun.run_type != DagRunType.BACKFILL_JOB,
                     TaskInstance.state.in_(resettable_states),
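
_per_task_process and the other helpers above lean on @provide_session. A
simplified stand-in for the decorator's contract, not Airflow's
implementation: if the caller omits session, one is created, injected, and
cleaned up automatically.

    import contextlib
    from functools import wraps

    @contextlib.contextmanager
    def create_session():
        session = object()  # stand-in for a SQLAlchemy Session with commit/close
        yield session

    def provide_session(func):
        @wraps(func)
        def wrapper(*args, session=None, **kwargs):
            if session is not None:
                return func(*args, session=session, **kwargs)
            with create_session() as new_session:
                return func(*args, session=new_session, **kwargs)
        return wrapper

    @provide_session
    def count_rows(session=None):
        return session is not None

    assert count_rows() is True
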
diff --git a/airflow/jobs/base_job.py b/airflow/jobs/base_job.py
index 4edb692..18893f2 100644
--- a/airflow/jobs/base_job.py
+++ b/airflow/jobs/base_job.py
@@ -152,7 +152,7 @@ class BaseJob(Base, LoggingMixin):
         job.end_date = timezone.utcnow()
         try:
             self.on_kill()
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             self.log.error('on_kill() method failed: %s', str(e))
         session.merge(job)
         session.commit()
diff --git a/airflow/jobs/local_task_job.py b/airflow/jobs/local_task_job.py
index c697a85..06a2f57 100644
--- a/airflow/jobs/local_task_job.py
+++ b/airflow/jobs/local_task_job.py
@@ -72,7 +72,6 @@ class LocalTaskJob(BaseJob):
     def _execute(self):
         self.task_runner = get_task_runner(self)
 
-        # pylint: disable=unused-argument
         def signal_handler(signum, frame):
             """Setting kill signal handler"""
             self.log.error("Received SIGTERM. Terminating subprocesses")
@@ -80,12 +79,9 @@ class LocalTaskJob(BaseJob):
             self.task_instance.refresh_from_db()
             if self.task_instance.state not in State.finished:
                 self.task_instance.set_state(State.FAILED)
-            self.task_instance._run_finished_callback(  # pylint: disable=protected-access
-                error="task received sigterm"
-            )
+            self.task_instance._run_finished_callback(error="task received sigterm")
             raise AirflowException("LocalTaskJob received SIGTERM signal")
 
-        # pylint: enable=unused-argument
         signal.signal(signal.SIGTERM, signal_handler)
 
         if not self.task_instance.check_and_change_state_before_execution(
@@ -161,7 +157,7 @@ class LocalTaskJob(BaseJob):
             self.task_instance.set_state(State.FAILED)
         if self.task_instance.state != State.SUCCESS:
             error = self.task_runner.deserialize_run_error()
-        self.task_instance._run_finished_callback(error=error)  # pylint: disable=protected-access
+        self.task_instance._run_finished_callback(error=error)
         if not self.task_instance.test_mode:
             self._update_dagrun_state_for_paused_dag()
 
@@ -209,7 +205,7 @@ class LocalTaskJob(BaseJob):
                 # error file will not be populated and it must be updated by
                # external source such as web UI
                 error = self.task_runner.deserialize_run_error() or "task marked as failed externally"
-            ti._run_finished_callback(error=error)  # pylint: disable=protected-access
+            ti._run_finished_callback(error=error)
             self.terminating = True
 
     @provide_session
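
The signal_handler closure above is standard SIGTERM wiring: register a
handler so teardown logic runs when the scheduler kills the task. A minimal
sketch with illustrative names, not the Airflow implementation:

    import signal

    def install_sigterm_handler(on_terminate):
        def signal_handler(signum, frame):  # frame is required by the signal API, unused
            on_terminate()
            raise SystemExit("received SIGTERM")
        signal.signal(signal.SIGTERM, signal_handler)

    install_sigterm_handler(lambda: print("terminating subprocesses"))
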
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index 1340fb8..b99f4b2 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -1,4 +1,3 @@
-# pylint: disable=no-name-in-module
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -678,7 +677,7 @@ def _is_parent_process():
     return multiprocessing.current_process().name == 'MainProcess'
 
 
-class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
+class SchedulerJob(BaseJob):
     """
     This SchedulerJob runs for a specific time interval and schedules the jobs
     that are ready to run. It figures out the latest runs for each
@@ -751,7 +750,7 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
         signal.signal(signal.SIGTERM, self._exit_gracefully)
         signal.signal(signal.SIGUSR2, self._debug_dump)
 
-    def _exit_gracefully(self, signum, frame) -> None:  # pylint: disable=unused-argument
+    def _exit_gracefully(self, signum, frame) -> None:
         """Helper method to clean up processor_agent to avoid leaving orphan processes."""
         if not _is_parent_process():
             # Only the parent process should perform the cleanup.
@@ -762,14 +761,14 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
             self.processor_agent.end()
         sys.exit(os.EX_OK)
 
-    def _debug_dump(self, signum, frame):  # pylint: disable=unused-argument
+    def _debug_dump(self, signum, frame):
         if not _is_parent_process():
             # Only the parent process should perform the debug dump.
             return
 
         try:
-            sig_name = signal.Signals(signum).name  # pylint: disable=no-member
-        except Exception:  # pylint: disable=broad-except
+            sig_name = signal.Signals(signum).name
+        except Exception:
             sig_name = str(signum)
 
         self.log.info("%s\n%s received, printing debug\n%s", "-" * 80, sig_name, "-" * 80)
@@ -822,9 +821,7 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
             .filter(models.TaskInstance.state.in_(old_states))
             .filter(
                 or_(
-                    # pylint: disable=comparison-with-callable
                     models.DagRun.state != State.RUNNING,
-                    # pylint: disable=no-member
                     models.DagRun.state.is_(None),
                 )
             )
@@ -900,7 +897,6 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
             task_map[(dag_id, task_id)] = count
         return dag_map, task_map
 
-    # pylint: disable=too-many-locals,too-many-statements
     @provide_session
     def _executable_task_instances_to_queued(self, max_tis: int, session: Session = None) -> List[TI]:
         """
@@ -980,7 +976,7 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
 
         # Go through each pool, and queue up a task for execution if there are
         # any open slots in the pool.
-        # pylint: disable=too-many-nested-blocks
+
         for pool, task_instances in pool_to_task_instances.items():
             pool_name = pool
             if pool not in pools:
@@ -1315,17 +1311,17 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
                 models.DAG.deactivate_stale_dags(execute_start_time)
 
             settings.Session.remove()  # type: ignore
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self.log.exception("Exception when executing SchedulerJob._run_scheduler_loop")
             raise
         finally:
             try:
                 self.executor.end()
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self.log.exception("Exception when executing Executor.end")
             try:
                 self.processor_agent.end()
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self.log.exception("Exception when executing DagFileProcessorAgent.end")
             self.log.info("Exited execute loop")
 
@@ -1658,7 +1654,7 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
             session.query(DagRun.dag_id, func.count('*'))
             .filter(
                 DagRun.dag_id.in_([o.dag_id for o in dag_models]),
-                DagRun.state == State.RUNNING,  # pylint: disable=comparison-with-callable
+                DagRun.state == State.RUNNING,
                 DagRun.external_trigger.is_(False),
             )
             .group_by(DagRun.dag_id)
@@ -1874,7 +1870,6 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
                         .join(TI.dag_run)
                         .filter(
                             DagRun.run_type != DagRunType.BACKFILL_JOB,
-                            # pylint: disable=comparison-with-callable
                             DagRun.state == State.RUNNING,
                         )
                         .options(load_only(TI.dag_id, TI.task_id, TI.execution_date))
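
_debug_dump above keeps the signal.Signals lookup that pylint's no-member
check misfired on; the broad except exists because an unknown signal number
raises ValueError. The same lookup, narrowed to the exception that actually
occurs:

    import signal

    def signal_name(signum: int) -> str:
        try:
            return signal.Signals(signum).name
        except ValueError:  # not a known signal on this platform
            return str(signum)

    assert signal_name(signal.SIGTERM.value) == "SIGTERM"
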
diff --git a/airflow/kubernetes/kube_client.py b/airflow/kubernetes/kube_client.py
index 0b772be..1c20bd3 100644
--- a/airflow/kubernetes/kube_client.py
+++ b/airflow/kubernetes/kube_client.py
@@ -26,12 +26,9 @@ try:
     from kubernetes import client, config
     from kubernetes.client import Configuration
     from kubernetes.client.api_client import ApiClient
-    from kubernetes.client.rest import ApiException  # pylint: disable=unused-import
+    from kubernetes.client.rest import ApiException
 
-    from airflow.kubernetes.refresh_config import (  # pylint: disable=ungrouped-imports
-        RefreshConfiguration,
-        load_kube_config,
-    )
+    from airflow.kubernetes.refresh_config import RefreshConfiguration, load_kube_config
 
     has_kubernetes = True
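
kube_client.py above wraps its imports in a try block so Airflow can start
without the kubernetes extra installed, recording availability in a module
flag instead of failing at import time. The skeleton of that pattern:

    try:
        from kubernetes import client  # noqa: F401
        has_kubernetes = True
    except ImportError as e:
        has_kubernetes = False
        _import_error = e  # kept so later callers can re-raise with context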
 
diff --git a/airflow/kubernetes/kube_config.py b/airflow/kubernetes/kube_config.py
index 11ec279..ef32831 100644
--- a/airflow/kubernetes/kube_config.py
+++ b/airflow/kubernetes/kube_config.py
@@ -20,14 +20,14 @@ from airflow.configuration import conf
 from airflow.settings import AIRFLOW_HOME
 
 
-class KubeConfig:  # pylint: disable=too-many-instance-attributes
+class KubeConfig:
     """Configuration for Kubernetes"""
 
     core_section = 'core'
     kubernetes_section = 'kubernetes'
     logging_section = 'logging'
 
-    def __init__(self):  # pylint: disable=too-many-statements
+    def __init__(self):
         configuration_dict = conf.as_dict(display_sensitive=True)
         self.core_configuration = configuration_dict[self.core_section]
         self.airflow_home = AIRFLOW_HOME
diff --git a/airflow/kubernetes/pod.py b/airflow/kubernetes/pod.py
index 02ce912..6bced0f 100644
--- a/airflow/kubernetes/pod.py
+++ b/airflow/kubernetes/pod.py
@@ -20,14 +20,11 @@ This module is deprecated.
 Please use :mod:`kubernetes.client.models` for V1ResourceRequirements and Port.
 """
 # flake8: noqa
-# pylint: disable=unused-import
+
 import warnings
 
 with warnings.catch_warnings():
-    from airflow.providers.cncf.kubernetes.backcompat.pod import (  # pylint: disable=unused-import
-        Port,
-        Resources,
-    )
+    from airflow.providers.cncf.kubernetes.backcompat.pod import Port, Resources
 
 warnings.warn(
     "This module is deprecated. Please use `kubernetes.client.models for V1ResourceRequirements and Port.",
diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py
index 4b4028f..fc536da 100644
--- a/airflow/kubernetes/pod_generator.py
+++ b/airflow/kubernetes/pod_generator.py
@@ -101,7 +101,7 @@ class PodGenerator:
     :type extract_xcom: bool
     """
 
-    def __init__(  # pylint: disable=too-many-arguments,too-many-locals
+    def __init__(
         self,
         pod: Optional[k8s.V1Pod] = None,
         pod_template_file: Optional[str] = None,
@@ -325,7 +325,7 @@ class PodGenerator:
         )
 
     @staticmethod
-    def construct_pod(  # pylint: disable=too-many-arguments
+    def construct_pod(
         dag_id: str,
         task_id: str,
         pod_id: str,
@@ -348,7 +348,7 @@ class PodGenerator:
             image = pod_override_object.spec.containers[0].image  # type: ignore
             if not image:
                 image = kube_image
-        except Exception:  # pylint: disable=W0703
+        except Exception:
             image = kube_image
 
         dynamic_pod = k8s.V1Pod(
@@ -417,7 +417,6 @@ class PodGenerator:
         else:
             pod = yaml.safe_load(path)
 
-        # pylint: disable=protected-access
         return PodGenerator.deserialize_model_dict(pod)
 
     @staticmethod
@@ -429,7 +428,7 @@ class PodGenerator:
         :return: De-serialized k8s.V1Pod
         """
         api_client = ApiClient()
-        return api_client._ApiClient__deserialize_model(pod_dict, k8s.V1Pod)  # pylint: disable=W0212
+        return api_client._ApiClient__deserialize_model(pod_dict, k8s.V1Pod)
 
     @staticmethod
     def make_unique_pod_id(pod_id: str) -> str:
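
The surviving call api_client._ApiClient__deserialize_model is Python name
mangling, which is what the removed W0212 marker acknowledged: a __name
defined inside a class body is rewritten to _ClassName__name, so external
callers must use the mangled form. A minimal demonstration with a
hypothetical class:

    class ApiClientDemo:
        def __deserialize_model(self, data):
            return dict(data)

    client = ApiClientDemo()
    assert client._ApiClientDemo__deserialize_model({"a": 1}) == {"a": 1}
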
diff --git a/airflow/kubernetes/pod_generator_deprecated.py b/airflow/kubernetes/pod_generator_deprecated.py
index 79bdcb4..c4d83d6 100644
--- a/airflow/kubernetes/pod_generator_deprecated.py
+++ b/airflow/kubernetes/pod_generator_deprecated.py
@@ -143,7 +143,7 @@ class PodGenerator:
     :type priority_class_name: str
     """
 
-    def __init__(  # pylint: disable=too-many-arguments,too-many-locals
+    def __init__(
         self,
         image: Optional[str] = None,
         name: Optional[str] = None,
diff --git a/airflow/kubernetes/pod_launcher.py b/airflow/kubernetes/pod_launcher.py
index 72ebba4..48a9060 100644
--- a/airflow/kubernetes/pod_launcher.py
+++ b/airflow/kubernetes/pod_launcher.py
@@ -20,5 +20,5 @@ This module is deprecated.
 Please use :mod:`kubernetes.client.models` for V1ResourceRequirements and Port.
 """
 # flake8: noqa
-# pylint: disable=unused-import
-from airflow.kubernetes.pod_launcher_deprecated import PodLauncher, PodStatus  # pylint: disable=unused-import
+
+from airflow.kubernetes.pod_launcher_deprecated import PodLauncher, PodStatus
diff --git a/airflow/kubernetes/pod_runtime_info_env.py b/airflow/kubernetes/pod_runtime_info_env.py
index 9a7ad19..4d7bd9f 100644
--- a/airflow/kubernetes/pod_runtime_info_env.py
+++ b/airflow/kubernetes/pod_runtime_info_env.py
@@ -17,7 +17,7 @@
 # under the License.
 """This module is deprecated. Please use :mod:`kubernetes.client.models.V1EnvVar`."""
 # flake8: noqa
-# pylint: disable=unused-import
+
 import warnings
 
 with warnings.catch_warnings():
diff --git a/airflow/kubernetes/refresh_config.py b/airflow/kubernetes/refresh_config.py
index a039e7d..2564951 100644
--- a/airflow/kubernetes/refresh_config.py
+++ b/airflow/kubernetes/refresh_config.py
@@ -62,12 +62,12 @@ class RefreshKubeConfigLoader(KubeConfigLoader):
             if 'token' not in status:
                 logging.error('exec: missing token field in plugin output')
                 return None
-            self.token = f"Bearer {status['token']}"  # pylint: disable=W0201
+            self.token = f"Bearer {status['token']}"
             ts_str = status.get('expirationTimestamp')
             if ts_str:
                 self.api_key_expire_ts = _parse_timestamp(ts_str)
             return True
-        except Exception as e:  # pylint: disable=W0703
+        except Exception as e:
             logging.error(str(e))
             return None
 
@@ -93,7 +93,7 @@ class RefreshConfiguration(Configuration):
 
     def get_api_key_with_prefix(self, identifier):
         if self.refresh_api_key:
-            self.refresh_api_key(self)  # pylint: disable=E1102
+            self.refresh_api_key(self)
         return Configuration.get_api_key_with_prefix(self, identifier)
 
 
diff --git a/airflow/kubernetes/volume.py b/airflow/kubernetes/volume.py
index fa452df..7fd58e2 100644
--- a/airflow/kubernetes/volume.py
+++ b/airflow/kubernetes/volume.py
@@ -17,7 +17,7 @@
 # under the License.
 """This module is deprecated. Please use :mod:`kubernetes.client.models.V1Volume`."""
 # flake8: noqa
-# pylint: disable=unused-import
+
 
 import warnings
 
diff --git a/airflow/kubernetes/volume_mount.py b/airflow/kubernetes/volume_mount.py
index eee7b5e..08bc5d3 100644
--- a/airflow/kubernetes/volume_mount.py
+++ b/airflow/kubernetes/volume_mount.py
@@ -17,7 +17,7 @@
 # under the License.
 """This module is deprecated. Please use :mod:`kubernetes.client.models.V1VolumeMount`."""
 # flake8: noqa
-# pylint: disable=unused-import
+
 
 import warnings
 
diff --git a/airflow/lineage/__init__.py b/airflow/lineage/__init__.py
index 905eb00..ff2a028 100644
--- a/airflow/lineage/__init__.py
+++ b/airflow/lineage/__init__.py
@@ -92,7 +92,7 @@ def _to_dataset(obj: Any, source: str) -> Optional[Metadata]:
     return Metadata(type_name, source, data)
 
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def apply_lineage(func: T) -> T:
@@ -138,7 +138,7 @@ def prepare_lineage(func: T) -> T:
     * "list of datasets" -> manually defined list of data
 
     """
-    # pylint: disable=protected-access
+
     @wraps(func)
     def wrapper(self, context, *args, **kwargs):
         from airflow.models.base import Operator
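
apply_lineage and prepare_lineage above are typed with
T = TypeVar("T", bound=Callable) so each decorator advertises that it returns
the same callable type it received, preserving the wrapped signature for type
checkers. A self-contained sketch of the pattern; the decorator body is
illustrative:

    from functools import wraps
    from typing import Callable, TypeVar, cast

    T = TypeVar("T", bound=Callable)

    def apply_logging(func: T) -> T:
        @wraps(func)
        def wrapper(*args, **kwargs):
            print(f"calling {func.__name__}")
            return func(*args, **kwargs)
        return cast(T, wrapper)

    @apply_logging
    def add(a: int, b: int) -> int:
        return a + b

    assert add(1, 2) == 3
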
diff --git a/airflow/lineage/backend.py b/airflow/lineage/backend.py
index edfbe0e..cde8d94 100644
--- a/airflow/lineage/backend.py
+++ b/airflow/lineage/backend.py
@@ -19,7 +19,7 @@
 from typing import TYPE_CHECKING, Optional
 
 if TYPE_CHECKING:
-    from airflow.models.baseoperator import BaseOperator  # pylint: disable=cyclic-import
+    from airflow.models.baseoperator import BaseOperator
 
 
 class LineageBackend:
diff --git a/airflow/lineage/entities.py b/airflow/lineage/entities.py
index f2bad75..87703ed 100644
--- a/airflow/lineage/entities.py
+++ b/airflow/lineage/entities.py
@@ -62,8 +62,9 @@ class Column:
 # this is a temporary hack to satisfy mypy. Once
 # https://github.com/python/mypy/issues/6136 is resolved, use
 # `attr.converters.default_if_none(default=False)`
-# pylint: disable=missing-docstring
-def default_if_none(arg: Optional[bool]) -> bool:  # noqa: D103
+
+
+def default_if_none(arg: Optional[bool]) -> bool:
     return arg or False
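
Per the comment above, default_if_none is a stopgap for
attr.converters.default_if_none(default=False). A sketch of how such a
converter is wired into an attrs class, assuming the attrs package and an
illustrative field:

    from typing import Optional

    import attr

    def default_if_none(arg: Optional[bool]) -> bool:
        return arg or False

    @attr.s(auto_attribs=True)
    class Column:
        primary_key: bool = attr.ib(default=False, converter=default_if_none)

    assert Column(primary_key=None).primary_key is False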
 
 
diff --git a/airflow/logging_config.py b/airflow/logging_config.py
index bfb1a11..05d6090 100644
--- a/airflow/logging_config.py
+++ b/airflow/logging_config.py
@@ -77,7 +77,7 @@ def configure_logging():
     return logging_class_path
 
 
-def validate_logging_config(logging_config):  # pylint: disable=unused-argument
+def validate_logging_config(logging_config):
     """Validate the provided Logging Config"""
    # Now let's validate the other logging-related settings
     task_log_reader = conf.get('logging', 'task_log_reader')
diff --git a/airflow/migrations/env.py b/airflow/migrations/env.py
index 9b12f6c..e0c558c 100644
--- a/airflow/migrations/env.py
+++ b/airflow/migrations/env.py
@@ -21,7 +21,6 @@ from logging.config import fileConfig
 from alembic import context
 
 from airflow import models, settings
-from airflow.models.serialized_dag import SerializedDagModel  # pylint: disable=unused-import # noqa
 
 
 def include_object(_, name, type_, *args):
@@ -102,7 +101,7 @@ def run_migrations_online():
             if connection.dialect.name == 'mysql' and connection.dialect.server_version_info >= (5, 6):
                 connection.execute("select GET_LOCK('alembic',1800);")
             if connection.dialect.name == 'postgresql':
-                context.get_context()._ensure_version_table()  # pylint: disable=protected-access
+                context.get_context()._ensure_version_table()
                 connection.execute("LOCK TABLE alembic_version IN ACCESS EXCLUSIVE MODE")
             context.run_migrations()
             if connection.dialect.name == 'mysql' and connection.dialect.server_version_info >= (5, 6):
diff --git a/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py b/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py
index d66bf5c..fc84681 100644
--- a/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py
+++ b/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py
@@ -32,9 +32,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('sm_dag', 'sla_miss', ['dag_id'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('sm_dag', table_name='sla_miss')
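
Every migration file below repeats the shape this first hunk shows: Alembic
revision identifiers at module level plus an upgrade()/downgrade() pair that
undo each other, with op.get_bind() dialect checks where MySQL or MSSQL need
special handling. A generic skeleton; the revision ids and table name are
placeholders:

    import sqlalchemy as sa
    from alembic import op

    revision = "0123456789ab"
    down_revision = "ba9876543210"
    branch_labels = None
    depends_on = None

    def upgrade():
        op.add_column("example_table", sa.Column("extra", sa.Text(), nullable=True))

    def downgrade():
        op.drop_column("example_table", "extra")
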
diff --git a/airflow/migrations/versions/05f30312d566_merge_heads.py b/airflow/migrations/versions/05f30312d566_merge_heads.py
index ffe2330..36940c6 100644
--- a/airflow/migrations/versions/05f30312d566_merge_heads.py
+++ b/airflow/migrations/versions/05f30312d566_merge_heads.py
@@ -30,9 +30,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     pass
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     pass
diff --git a/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py b/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py
index 4c572f4..33e89b3 100644
--- a/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py
+++ b/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py
@@ -41,19 +41,19 @@ INDEX_NAME = 'idx_' + TABLE_NAME + '_dag_task_date'
 # For Microsoft SQL Server, TIMESTAMP is a row-id type,
 # having nothing to do with date-time.  DateTime() will
 # be sufficient.
-def mssql_timestamp():  # noqa: D103
+def mssql_timestamp():
     return sa.DateTime()
 
 
-def mysql_timestamp():  # noqa: D103
+def mysql_timestamp():
     return mysql.TIMESTAMP(fsp=6)
 
 
-def sa_timestamp():  # noqa: D103
+def sa_timestamp():
     return sa.TIMESTAMP(timezone=True)
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     # See 0e2a74e0fc9f_add_time_zone_awareness
     conn = op.get_bind()
     if conn.dialect.name == 'mysql':
@@ -85,6 +85,6 @@ def upgrade():  # noqa: D103
     op.create_index(INDEX_NAME, TABLE_NAME, ['dag_id', 'task_id', 'execution_date'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index(INDEX_NAME, table_name=TABLE_NAME)
     op.drop_table(TABLE_NAME)
diff --git a/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py b/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py
index f18809c..e4d35c0 100644
--- a/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py
+++ b/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py
@@ -34,7 +34,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     conn = op.get_bind()
     if conn.dialect.name == "mysql":
         conn.execute("SET time_zone = '+00:00'")
@@ -277,7 +277,7 @@ def upgrade():  # noqa: D103
         )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     conn = op.get_bind()
     if conn.dialect.name == "mysql":
         conn.execute("SET time_zone = '+00:00'")
diff --git a/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py b/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py
index 855e55c..288a0b6 100644
--- a/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py
+++ b/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py
@@ -32,9 +32,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('dag_id_state', table_name='dag_run')
diff --git a/airflow/migrations/versions/13eb55f81627_for_compatibility.py b/airflow/migrations/versions/13eb55f81627_for_compatibility.py
index 538db1a..a15b20c 100644
--- a/airflow/migrations/versions/13eb55f81627_for_compatibility.py
+++ b/airflow/migrations/versions/13eb55f81627_for_compatibility.py
@@ -31,9 +31,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     pass
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     pass
diff --git a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
index 7afdeb2..1a539c6 100644
--- a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
+++ b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
@@ -38,7 +38,7 @@ connectionhelper = sa.Table(
 )
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     # first check if the user already has this done. This should only be
     # true for users who are upgrading from a previous version of Airflow
     # that predates Alembic integration
@@ -58,5 +58,5 @@ def upgrade():  # noqa: D103
     conn.execute(connectionhelper.update().values(is_encrypted=False))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('connection', 'is_encrypted')
diff --git a/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py b/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py
index e880d77..3e7c694 100644
--- a/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py
+++ b/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('variable', sa.Column('is_encrypted', sa.Boolean, default=False))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('variable', 'is_encrypted')
diff --git a/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py b/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py
index 7edebfc..6ef4d91 100644
--- a/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py
+++ b/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py
@@ -34,7 +34,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_table(
         'dag_run',
         sa.Column('id', sa.Integer(), nullable=False),
@@ -49,5 +49,5 @@ def upgrade():  # noqa: D103
     )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_table('dag_run')
diff --git a/airflow/migrations/versions/211e584da130_add_ti_state_index.py b/airflow/migrations/versions/211e584da130_add_ti_state_index.py
index 7df1550..86c2e7d 100644
--- a/airflow/migrations/versions/211e584da130_add_ti_state_index.py
+++ b/airflow/migrations/versions/211e584da130_add_ti_state_index.py
@@ -32,9 +32,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('ti_state', 'task_instance', ['state'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('ti_state', table_name='task_instance')
diff --git a/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py b/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py
index d0853ef..c02a1f5 100644
--- a/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py
+++ b/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py
@@ -38,9 +38,9 @@ TASK_INSTANCE_TABLE = "task_instance"
 NEW_COLUMN = "executor_config"
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column(TASK_INSTANCE_TABLE, sa.Column(NEW_COLUMN, sa.PickleType(pickler=dill)))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column(TASK_INSTANCE_TABLE, NEW_COLUMN)
diff --git a/airflow/migrations/versions/2e541a1dcfed_task_duration.py b/airflow/migrations/versions/2e541a1dcfed_task_duration.py
index 12d8e2e..e7535bd 100644
--- a/airflow/migrations/versions/2e541a1dcfed_task_duration.py
+++ b/airflow/migrations/versions/2e541a1dcfed_task_duration.py
@@ -35,7 +35,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     # use batch_alter_table to support SQLite workaround
     with op.batch_alter_table("task_instance") as batch_op:
         batch_op.alter_column(
@@ -46,5 +46,5 @@ def upgrade():  # noqa: D103
         )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     pass
diff --git a/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py b/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py
index 3dcbe47..cf461c0 100644
--- a/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py
+++ b/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py
@@ -32,9 +32,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.rename_table('user', 'users')
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.rename_table('users', 'user')
diff --git a/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py b/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py
index 60ed662..b8ab290 100644
--- a/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py
+++ b/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py
@@ -33,11 +33,11 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('task_instance', sa.Column('operator', sa.String(length=1000), nullable=True))
     op.add_column('task_instance', sa.Column('queued_dttm', sa.DateTime(), nullable=True))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('task_instance', 'queued_dttm')
     op.drop_column('task_instance', 'operator')
diff --git a/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py b/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py
index 56542eb..d8d600b 100644
--- a/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py
+++ b/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py
@@ -36,7 +36,7 @@ depends_on = None
 RESOURCE_TABLE = "kube_resource_version"
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     conn = op.get_bind()
     inspector = Inspector.from_engine(conn)
 
@@ -60,7 +60,7 @@ def upgrade():  # noqa: D103
         op.bulk_insert(table, [{"resource_version": ""}])
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     conn = op.get_bind()
     inspector = Inspector.from_engine(conn)
 
diff --git a/airflow/migrations/versions/40e67319e3a9_dagrun_config.py b/airflow/migrations/versions/40e67319e3a9_dagrun_config.py
index 96c211e..7ce1838 100644
--- a/airflow/migrations/versions/40e67319e3a9_dagrun_config.py
+++ b/airflow/migrations/versions/40e67319e3a9_dagrun_config.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('dag_run', sa.Column('conf', sa.PickleType(), nullable=True))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('dag_run', 'conf')
diff --git a/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py b/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py
index 572845b..8b3e9b4 100644
--- a/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py
+++ b/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('users', sa.Column('superuser', sa.Boolean(), default=False))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('users', 'superuser')
diff --git a/airflow/migrations/versions/4446e08588_dagrun_start_end.py b/airflow/migrations/versions/4446e08588_dagrun_start_end.py
index 2ee5273..48c85d5 100644
--- a/airflow/migrations/versions/4446e08588_dagrun_start_end.py
+++ b/airflow/migrations/versions/4446e08588_dagrun_start_end.py
@@ -34,11 +34,11 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('dag_run', sa.Column('end_date', sa.DateTime(), nullable=True))
     op.add_column('dag_run', sa.Column('start_date', sa.DateTime(), nullable=True))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('dag_run', 'start_date')
     op.drop_column('dag_run', 'end_date')
diff --git a/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py b/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py
index 1f32448..00ec5df 100644
--- a/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py
+++ b/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py
@@ -34,8 +34,8 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
-    conn = op.get_bind()  # pylint: disable=no-member
+def upgrade():
+    conn = op.get_bind()
     if conn.dialect.name == "mysql":
         op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME(fsp=6))
         op.alter_column(table_name='dag', column_name='last_pickled', type_=mysql.DATETIME(fsp=6))
@@ -79,8 +79,8 @@ def upgrade():  # noqa: D103
         op.alter_column(table_name='xcom', column_name='execution_date', type_=mysql.DATETIME(fsp=6))
 
 
-def downgrade():  # noqa: D103
-    conn = op.get_bind()  # pylint: disable=no-member
+def downgrade():
+    conn = op.get_bind()
     if conn.dialect.name == "mysql":
         op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME())
         op.alter_column(table_name='dag', column_name='last_pickled', type_=mysql.DATETIME())
diff --git a/airflow/migrations/versions/502898887f84_adding_extra_to_log.py b/airflow/migrations/versions/502898887f84_adding_extra_to_log.py
index 0f00e11..1bf5f0d 100644
--- a/airflow/migrations/versions/502898887f84_adding_extra_to_log.py
+++ b/airflow/migrations/versions/502898887f84_adding_extra_to_log.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('log', sa.Column('extra', sa.Text(), nullable=True))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('log', 'extra')
diff --git a/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py b/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py
index 84daac5..122821b 100644
--- a/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py
+++ b/airflow/migrations/versions/52d53670a240_fix_mssql_exec_date_rendered_task_instance.py
@@ -44,10 +44,10 @@ def upgrade():
     conn = op.get_bind()
     if conn.dialect.name == "mssql":
         json_type = sa.Text
-        op.drop_table(TABLE_NAME)  # pylint: disable=no-member
+        op.drop_table(TABLE_NAME)
 
         op.create_table(
-            TABLE_NAME,  # pylint: disable=no-member
+            TABLE_NAME,
             sa.Column('dag_id', sa.String(length=250), nullable=False),
             sa.Column('task_id', sa.String(length=250), nullable=False),
             sa.Column('execution_date', mssql.DATETIME2, nullable=False),
@@ -64,10 +64,10 @@ def downgrade():
     conn = op.get_bind()
     if conn.dialect.name == "mssql":
         json_type = sa.Text
-        op.drop_table(TABLE_NAME)  # pylint: disable=no-member
+        op.drop_table(TABLE_NAME)
 
         op.create_table(
-            TABLE_NAME,  # pylint: disable=no-member
+            TABLE_NAME,
             sa.Column('dag_id', sa.String(length=250), nullable=False),
             sa.Column('task_id', sa.String(length=250), nullable=False),
             sa.Column('execution_date', sa.TIMESTAMP, nullable=False),
diff --git a/airflow/migrations/versions/52d714495f0_job_id_indices.py b/airflow/migrations/versions/52d714495f0_job_id_indices.py
index fc3ecad..abf4e2d 100644
--- a/airflow/migrations/versions/52d714495f0_job_id_indices.py
+++ b/airflow/migrations/versions/52d714495f0_job_id_indices.py
@@ -32,9 +32,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('idx_job_state_heartbeat', 'job', ['state', 'latest_heartbeat'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('idx_job_state_heartbeat', table_name='job')
diff --git a/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py b/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py
index 144259e..5a83b11 100644
--- a/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py
+++ b/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('user', sa.Column('password', sa.String(255)))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('user', 'password')
diff --git a/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py b/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py
index 1b5963b..ff480c7 100644
--- a/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py
+++ b/airflow/migrations/versions/61ec73d9401f_add_description_field_to_connection.py
@@ -36,7 +36,7 @@ depends_on = None
 
 def upgrade():
     """Apply Add description field to connection"""
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
 
     with op.batch_alter_table('connection') as batch_op:
         if conn.dialect.name == "mysql":
diff --git a/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py b/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py
index 09261d5..fa7b2b2 100644
--- a/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py
+++ b/airflow/migrations/versions/64a7d6477aae_fix_description_field_in_connection_to_.py
@@ -24,8 +24,8 @@ Create Date: 2020-11-25 08:56:11.866607
 
 """
 
-import sqlalchemy as sa  # noqa
-from alembic import op  # noqa
+import sqlalchemy as sa
+from alembic import op
 
 # revision identifiers, used by Alembic.
 revision = '64a7d6477aae'
@@ -36,7 +36,7 @@ depends_on = None
 
 def upgrade():
     """Apply fix description field in connection to be text"""
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
     if conn.dialect.name == "sqlite":
         # in sqlite TEXT and STRING column types are the same
         return
@@ -55,7 +55,7 @@ def upgrade():
 
 def downgrade():
     """Unapply fix description field in connection to be text"""
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
     if conn.dialect.name == "sqlite":
         # in sqlite TEXT and STRING column types are the same
         return
diff --git a/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py b/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py
index 40dd9dd..9d93736 100644
--- a/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py
+++ b/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py
@@ -35,7 +35,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_table(
         'task_fail',
         sa.Column('id', sa.Integer(), nullable=False),
@@ -49,5 +49,5 @@ def upgrade():  # noqa: D103
     )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_table('task_fail')
diff --git a/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py b/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py
index 556c1a5..eee6274 100644
--- a/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py
+++ b/airflow/migrations/versions/849da589634d_prefix_dag_permissions.py
@@ -37,7 +37,7 @@ branch_labels = None
 depends_on = None
 
 
-def prefix_individual_dag_permissions(session):  # noqa: D103
+def prefix_individual_dag_permissions(session):
     dag_perms = ['can_dag_read', 'can_dag_edit']
     prefix = "DAG:"
     permission_view_menus = (
@@ -55,7 +55,7 @@ def prefix_individual_dag_permissions(session):  # noqa: D103
     session.commit()
 
 
-def get_or_create_dag_resource(session):  # noqa: D103
+def get_or_create_dag_resource(session):
     dag_resource = get_resource_query(session, permissions.RESOURCE_DAG).first()
     if dag_resource:
         return dag_resource
@@ -68,7 +68,7 @@ def get_or_create_dag_resource(session):  # noqa: D103
     return dag_resource
 
 
-def get_or_create_action(session, action_name):  # noqa: D103
+def get_or_create_action(session, action_name):
     action = get_action_query(session, action_name).first()
     if action:
         return action
@@ -81,28 +81,28 @@ def get_or_create_action(session, action_name):  # noqa: D103
     return action
 
 
-def get_resource_query(session, resource_name):  # noqa: D103
+def get_resource_query(session, resource_name):
     return session.query(ViewMenu).filter(ViewMenu.name == resource_name)
 
 
-def get_action_query(session, action_name):  # noqa: D103
+def get_action_query(session, action_name):
     return session.query(Permission).filter(Permission.name == action_name)
 
 
-def get_pv_with_action_query(session, action):  # noqa: D103
+def get_pv_with_action_query(session, action):
     return session.query(PermissionView).filter(PermissionView.permission == action)
 
 
-def get_pv_with_resource_query(session, resource):  # noqa: D103
+def get_pv_with_resource_query(session, resource):
     return session.query(PermissionView).filter(PermissionView.view_menu_id == resource.id)
 
 
-def update_pv_action(session, pv_query, action):  # noqa: D103
+def update_pv_action(session, pv_query, action):
     pv_query.update({PermissionView.permission_id: action.id}, synchronize_session=False)
     session.commit()
 
 
-def get_pv(session, resource, action):  # noqa: D103
+def get_pv(session, resource, action):
     return (
         session.query(PermissionView)
         .filter(PermissionView.view_menu == resource)
@@ -111,9 +111,9 @@ def get_pv(session, resource, action):  # noqa: D103
     )
 
 
-def update_pv_resource(session, pv_query, resource):  # noqa: D103
-    for pv in pv_query.all():  # noqa: D103
-        if not get_pv(session, resource, pv.permission):  # noqa: D103
+def update_pv_resource(session, pv_query, resource):
+    for pv in pv_query.all():
+        if not get_pv(session, resource, pv.permission):
             pv.view_menu = resource
         else:
             session.delete(pv)
@@ -121,7 +121,7 @@ def update_pv_resource(session, pv_query, resource):  # noqa: D103
     session.commit()
 
 
-def migrate_to_new_dag_permissions(db):  # noqa: D103
+def migrate_to_new_dag_permissions(db):
     # Prefix individual dag perms with `DAG:`
     prefix_individual_dag_permissions(db.session)
 
@@ -158,7 +158,7 @@ def migrate_to_new_dag_permissions(db):  # noqa: D103
     db.session.commit()
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     db = SQLA()
     db.session = settings.Session
     migrate_to_new_dag_permissions(db)
@@ -166,5 +166,5 @@ def upgrade():  # noqa: D103
     db.session.close()
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     pass
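
prefix_individual_dag_permissions and its helpers lean on a get-or-create idiom (get_or_create_dag_resource, get_or_create_action) against Flask-AppBuilder's Permission and ViewMenu tables. The idiom generalizes to any SQLAlchemy model; a sketch, with the model and session supplied by the caller:

    def get_or_create(session, model, **kwargs):
        """Return the existing row matching kwargs, inserting one if absent."""
        instance = session.query(model).filter_by(**kwargs).first()
        if instance is None:
            instance = model(**kwargs)
            session.add(instance)
            session.commit()
        return instance
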
diff --git a/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py b/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py
index 282286d..59cbf1a 100644
--- a/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py
+++ b/airflow/migrations/versions/852ae6c715af_add_rendered_task_instance_fields_table.py
@@ -39,7 +39,7 @@ TABLE_NAME = 'rendered_task_instance_fields'
 def upgrade():
     """Apply Add RenderedTaskInstanceFields table"""
     json_type = sa.JSON
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
 
     if conn.dialect.name != "postgresql":
         # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
@@ -50,7 +50,7 @@ def upgrade():
             json_type = sa.Text
 
     op.create_table(
-        TABLE_NAME,  # pylint: disable=no-member
+        TABLE_NAME,
         sa.Column('dag_id', sa.String(length=250), nullable=False),
         sa.Column('task_id', sa.String(length=250), nullable=False),
         sa.Column('execution_date', sa.TIMESTAMP(timezone=True), nullable=False),
@@ -61,4 +61,4 @@ def upgrade():
 
 def downgrade():
     """Drop RenderedTaskInstanceFields table"""
-    op.drop_table(TABLE_NAME)  # pylint: disable=no-member
+    op.drop_table(TABLE_NAME)
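
852ae6c715af also shows the JSON-or-Text fallback used by several Airflow tables: declare sa.JSON, then downgrade to sa.Text on backends where native JSON support is uncertain. A simplified sketch (the real migration additionally checks the MySQL/MariaDB version, and the table name here is illustrative):

    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        json_type = sa.JSON
        conn = op.get_bind()
        if conn.dialect.name != "postgresql":
            # MySQL < 5.7 and MariaDB < 10.2.3 lack a native JSON type
            json_type = sa.Text
        op.create_table(
            'example',
            sa.Column('id', sa.Integer(), primary_key=True),
            sa.Column('payload', json_type),
        )
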
diff --git a/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py b/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py
index fd8936c..92a97fa 100644
--- a/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py
+++ b/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py
@@ -67,6 +67,6 @@ def upgrade():
             batch_op.create_foreign_key('chart_user_id_fkey', 'users', ['user_id'], ['id'])
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     # Downgrade would fail because the broken FK constraint can't be re-created.
     pass
diff --git a/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py b/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py
index bf49873..c60049f 100644
--- a/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py
+++ b/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py
@@ -45,7 +45,9 @@ BATCH_SIZE = 5000
 ID_LEN = 250
 
 
-class TaskInstance(Base):  # noqa: D101  # type: ignore
+class TaskInstance(Base):  # type: ignore
+    """Task instance class."""
+
     __tablename__ = "task_instance"
 
     task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
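
Note the idiom in 8646922c8a04 (and cc1e65623dc7 further down): a data migration declares its own stripped-down TaskInstance model instead of importing airflow.models, so the migration keeps working even after the real model evolves. A sketch of the shape, with the column set trimmed and the update body assumed for illustration rather than taken from this diff:

    import sqlalchemy as sa
    from alembic import op
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import Session

    Base = declarative_base()

    class TaskInstance(Base):
        """Shadow model: only the columns this migration touches."""
        __tablename__ = 'task_instance'
        task_id = Column(String(250), primary_key=True)
        pool_slots = Column(Integer, default=1)

    def upgrade():
        op.add_column('task_instance', sa.Column('pool_slots', sa.Integer, default=1))
        session = Session(bind=op.get_bind())
        session.query(TaskInstance).filter(TaskInstance.pool_slots.is_(None)).update(
            {TaskInstance.pool_slots: 1}, synchronize_session=False
        )
        session.commit()
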
diff --git a/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py b/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py
index db3ccdc..bad9f5d 100644
--- a/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py
+++ b/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py
@@ -35,7 +35,7 @@ depends_on = None
 RESOURCE_TABLE = "kube_worker_uuid"
 
 
-def upgrade():  # noqa: D103
+def upgrade():
 
     columns_and_constraints = [
         sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
@@ -55,5 +55,5 @@ def upgrade():  # noqa: D103
     op.bulk_insert(table, [{"worker_uuid": ""}])
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_table(RESOURCE_TABLE)
diff --git a/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py b/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py
index ffb61a3..7c51df4 100644
--- a/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py
+++ b/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py
@@ -32,7 +32,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     with op.batch_alter_table('task_reschedule') as batch_op:
         batch_op.drop_constraint('task_reschedule_dag_task_date_fkey', type_='foreignkey')
         batch_op.create_foreign_key(
@@ -44,7 +44,7 @@ def upgrade():  # noqa: D103
         )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     with op.batch_alter_table('task_reschedule') as batch_op:
         batch_op.drop_constraint('task_reschedule_dag_task_date_fkey', type_='foreignkey')
         batch_op.create_foreign_key(
diff --git a/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py b/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py
index a1d8b8f..e7c948e 100644
--- a/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py
+++ b/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py
@@ -32,9 +32,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('ti_job_id', 'task_instance', ['job_id'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('ti_job_id', table_name='task_instance')
diff --git a/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py b/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py
index 63fb689..a580fc9 100644
--- a/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py
+++ b/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py
@@ -52,7 +52,7 @@ def upgrade():
 
     """Apply add source code table"""
     op.create_table(
-        'dag_code',  # pylint: disable=no-member
+        'dag_code',
         sa.Column('fileloc_hash', sa.BigInteger(), nullable=False, primary_key=True, autoincrement=False),
         sa.Column('fileloc', sa.String(length=2000), nullable=False),
         sa.Column('source_code', sa.UnicodeText(), nullable=False),
diff --git a/airflow/migrations/versions/9635ae0956e7_index_faskfail.py b/airflow/migrations/versions/9635ae0956e7_index_faskfail.py
index c924b3a..663a069 100644
--- a/airflow/migrations/versions/9635ae0956e7_index_faskfail.py
+++ b/airflow/migrations/versions/9635ae0956e7_index_faskfail.py
@@ -31,11 +31,11 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index(
         'idx_task_fail_dag_task_date', 'task_fail', ['dag_id', 'task_id', 'execution_date'], unique=False
     )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('idx_task_fail_dag_task_date', table_name='task_fail')
diff --git a/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py b/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py
index 8019aa2..f4e144d 100644
--- a/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py
+++ b/airflow/migrations/versions/98271e7606e2_add_scheduling_decision_to_dagrun_and_.py
@@ -37,7 +37,7 @@ depends_on = None
 
 def upgrade():
     """Apply Add scheduling_decision to DagRun and DAG"""
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
     is_mysql = bool(conn.dialect.name == "mysql")
     is_sqlite = bool(conn.dialect.name == "sqlite")
     timestamp = sa.TIMESTAMP(timezone=True) if not is_mysql else mysql.TIMESTAMP(fsp=6, timezone=True)
@@ -85,7 +85,7 @@ def upgrade():
 
 def downgrade():
     """Unapply Add scheduling_decision to DagRun and DAG"""
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
     is_sqlite = bool(conn.dialect.name == "sqlite")
 
     if is_sqlite:
diff --git a/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py b/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py
index 121c7fa..4dd825e 100644
--- a/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py
+++ b/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py
@@ -34,9 +34,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('task_instance', sa.Column('pool_slots', sa.Integer, default=1))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('task_instance', 'pool_slots')
diff --git a/airflow/migrations/versions/b0125267960b_merge_heads.py b/airflow/migrations/versions/b0125267960b_merge_heads.py
index 5c05dd7..bba9e73 100644
--- a/airflow/migrations/versions/b0125267960b_merge_heads.py
+++ b/airflow/migrations/versions/b0125267960b_merge_heads.py
@@ -31,9 +31,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     pass
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     pass
diff --git a/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py b/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py
index 4b2cacd..d559faf 100644
--- a/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py
+++ b/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py
@@ -34,9 +34,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('connection', sa.Column('is_extra_encrypted', sa.Boolean, default=False))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('connection', 'is_extra_encrypted')
diff --git a/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py b/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py
index 2e73d05..4c63938 100644
--- a/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py
+++ b/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('sla_miss', sa.Column('notification_sent', sa.Boolean, default=False))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('sla_miss', 'notification_sent')
diff --git a/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py b/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py
index cd4fa0d..d69d24a 100644
--- a/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py
+++ b/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py
@@ -34,7 +34,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     # There can be data truncation here as LargeBinary can be smaller than the pickle
     # type.
     # use batch_alter_table to support SQLite workaround
@@ -42,7 +42,7 @@ def upgrade():  # noqa: D103
         batch_op.alter_column('value', type_=sa.LargeBinary())
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     # use batch_alter_table to support SQLite workaround
     with op.batch_alter_table("xcom") as batch_op:
         batch_op.alter_column('value', type_=sa.PickleType(pickler=dill))
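
The xcom change relies on op.batch_alter_table because SQLite cannot ALTER a column's type in place; batch mode rebuilds the table (create a copy with the new schema, move the rows, rename) on SQLite and degrades to a plain ALTER elsewhere. The shape of both directions, as in the hunk above:

    import dill
    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        # LargeBinary may be narrower than the pickle column, so data can truncate.
        with op.batch_alter_table('xcom') as batch_op:
            batch_op.alter_column('value', type_=sa.LargeBinary())

    def downgrade():
        with op.batch_alter_table('xcom') as batch_op:
            batch_op.alter_column('value', type_=sa.PickleType(pickler=dill))
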
diff --git a/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py b/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py
index 845ce35..7fa96ea 100644
--- a/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py
+++ b/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py
@@ -33,9 +33,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('ti_dag_date', 'task_instance', ['dag_id', 'execution_date'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('ti_dag_date', table_name='task_instance')
diff --git a/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py b/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py
index c620286..ff02efb 100644
--- a/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py
+++ b/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py
@@ -34,11 +34,11 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('dag', sa.Column('description', sa.Text(), nullable=True))
     op.add_column('dag', sa.Column('default_view', sa.String(25), nullable=True))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('dag', 'description')
     op.drop_column('dag', 'default_view')
diff --git a/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py b/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py
index e6169b2..9e3bf7b 100644
--- a/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py
+++ b/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py
@@ -45,7 +45,9 @@ BATCH_SIZE = 5000
 ID_LEN = 250
 
 
-class TaskInstance(Base):  # noqa: D101  # type: ignore
+class TaskInstance(Base):  # type: ignore
+    """Task Instance class."""
+
     __tablename__ = "task_instance"
 
     task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
@@ -55,7 +57,7 @@ class TaskInstance(Base):  # noqa: D101  # type: ignore
     try_number = Column(Integer, default=0)
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('task_instance', sa.Column('max_tries', sa.Integer, server_default="-1"))
     # Check if the task_instance table exists before the data migration. This check is
     # needed for databases that do not create tables until the migration finishes.
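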
@@ -97,7 +99,7 @@ def upgrade():  # noqa: D103
         session.commit()
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     engine = settings.engine
     if engine.dialect.has_table(engine, 'task_instance'):
         connection = op.get_bind()
diff --git a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py
index 8200850..e725252 100644
--- a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py
+++ b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py
@@ -34,7 +34,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     # We previously had a KnownEvent table, but we deleted the table without
     # a down migration to remove it (so we didn't delete anyone's data if they
     # happened to be using the feature).
@@ -60,7 +60,7 @@ def upgrade():  # noqa: D103
         op.drop_table("users")
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     conn = op.get_bind()
 
     op.create_table(
diff --git a/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py b/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py
index e8d445d..ece0234 100644
--- a/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py
+++ b/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py
@@ -33,13 +33,13 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
-    conn = op.get_bind()  # pylint: disable=no-member
+def upgrade():
+    conn = op.get_bind()
     if conn.dialect.name == "mysql":
         op.alter_column(table_name='variable', column_name='val', type_=mysql.MEDIUMTEXT)
 
 
-def downgrade():  # noqa: D103
-    conn = op.get_bind()  # pylint: disable=no-member
+def downgrade():
+    conn = op.get_bind()
     if conn.dialect.name == "mysql":
         op.alter_column(table_name='variable', column_name='val', type_=mysql.TEXT)
diff --git a/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py b/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py
index 2a446e6..d8022b4 100644
--- a/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py
+++ b/airflow/migrations/versions/d38e04c12aa2_add_serialized_dag_table.py
@@ -37,7 +37,7 @@ depends_on = None
 def upgrade():
     """Upgrade version."""
     json_type = sa.JSON
-    conn = op.get_bind()  # pylint: disable=no-member
+    conn = op.get_bind()
 
     if conn.dialect.name != "postgresql":
         # Mysql 5.7+/MariaDB 10.2.3 has JSON support. Rather than checking for
@@ -48,7 +48,7 @@ def upgrade():
             json_type = sa.Text
 
     op.create_table(
-        'serialized_dag',  # pylint: disable=no-member
+        'serialized_dag',
         sa.Column('dag_id', sa.String(length=250), nullable=False),
         sa.Column('fileloc', sa.String(length=2000), nullable=False),
         sa.Column('fileloc_hash', sa.Integer(), nullable=False),
@@ -56,7 +56,7 @@ def upgrade():
         sa.Column('last_updated', sa.DateTime(), nullable=False),
         sa.PrimaryKeyConstraint('dag_id'),
     )
-    op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash'])  # pylint: disable=no-member
+    op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash'])
 
     if conn.dialect.name == "mysql":
         conn.execute("SET time_zone = '+00:00'")
@@ -65,7 +65,7 @@ def upgrade():
         if res[0][0] == 0:
             raise Exception("Global variable explicit_defaults_for_timestamp needs to be on (1) for mysql")
 
-        op.alter_column(  # pylint: disable=no-member
+        op.alter_column(
             table_name="serialized_dag",
             column_name="last_updated",
             type_=mysql.TIMESTAMP(fsp=6),
@@ -81,7 +81,7 @@ def upgrade():
         if conn.dialect.name == "postgresql":
             conn.execute("set timezone=UTC")
 
-        op.alter_column(  # pylint: disable=no-member
+        op.alter_column(
             table_name="serialized_dag",
             column_name="last_updated",
             type_=sa.TIMESTAMP(timezone=True),
@@ -90,4 +90,4 @@ def upgrade():
 
 def downgrade():
     """Downgrade version."""
-    op.drop_table('serialized_dag')  # pylint: disable=no-member
+    op.drop_table('serialized_dag')
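
The serialized_dag migration shows the per-backend timestamp dance: MySQL needs TIMESTAMP(fsp=6) for microsecond precision plus explicit_defaults_for_timestamp=1 on the server, while other backends take the generic timezone-aware type. A helper capturing just the type selection (the name _timestamp is ours, not the migration's):

    import sqlalchemy as sa
    from sqlalchemy.dialects import mysql

    def _timestamp(dialect_name: str):
        # Microsecond-precision, timezone-aware timestamp for each backend.
        if dialect_name == 'mysql':
            return mysql.TIMESTAMP(fsp=6, timezone=True)
        return sa.TIMESTAMP(timezone=True)
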
diff --git a/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py b/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py
index 220535a..560b763 100644
--- a/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py
+++ b/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py
@@ -31,9 +31,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_index('idx_log_dag', 'log', ['dag_id'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_index('idx_log_dag', table_name='log')
diff --git a/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py b/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py
index b5fdc29..776527e 100644
--- a/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py
+++ b/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py
@@ -34,9 +34,9 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.add_column('dag', sa.Column('schedule_interval', sa.Text(), nullable=True))
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_column('dag', 'schedule_interval')
diff --git a/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py b/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py
index e603199..851b97f 100644
--- a/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py
+++ b/airflow/migrations/versions/e38be357a868_update_schema_for_smart_sensor.py
@@ -36,19 +36,19 @@ branch_labels = None
 depends_on = None
 
 
-def mssql_timestamp():  # noqa: D103
+def mssql_timestamp():
     return sa.DateTime()
 
 
-def mysql_timestamp():  # noqa: D103
+def mysql_timestamp():
     return mysql.TIMESTAMP(fsp=6)
 
 
-def sa_timestamp():  # noqa: D103
+def sa_timestamp():
     return sa.TIMESTAMP(timezone=True)
 
 
-def upgrade():  # noqa: D103
+def upgrade():
 
     conn = op.get_bind()
     inspector = Inspector.from_engine(conn)
@@ -89,7 +89,7 @@ def upgrade():  # noqa: D103
     op.create_index('si_updated_at', 'sensor_instance', ['updated_at'], unique=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     conn = op.get_bind()
     inspector = Inspector.from_engine(conn)
     tables = inspector.get_table_names()
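
e38be357a868 guards its DDL with SQLAlchemy's Inspector so that rerunning against a schema which already has the table stays idempotent. Inspector.from_engine was the SQLAlchemy 1.3-era spelling (newer code would use sqlalchemy.inspect); the same guard, with an illustrative column set:

    import sqlalchemy as sa
    from alembic import op
    from sqlalchemy.engine.reflection import Inspector

    def upgrade():
        conn = op.get_bind()
        inspector = Inspector.from_engine(conn)
        if 'sensor_instance' in inspector.get_table_names():
            return  # table already exists, nothing to do
        op.create_table(
            'sensor_instance',
            sa.Column('id', sa.Integer(), primary_key=True),
        )
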
diff --git a/airflow/migrations/versions/e3a246e0dc1_current_schema.py b/airflow/migrations/versions/e3a246e0dc1_current_schema.py
index 60e6cdf..bd0a8a3 100644
--- a/airflow/migrations/versions/e3a246e0dc1_current_schema.py
+++ b/airflow/migrations/versions/e3a246e0dc1_current_schema.py
@@ -38,7 +38,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     conn = op.get_bind()
     inspector = Inspector.from_engine(conn)
     tables = inspector.get_table_names()
@@ -221,7 +221,7 @@ def upgrade():  # noqa: D103
         )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_table('chart')
     op.drop_table('variable')
     op.drop_table('user')
diff --git a/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py b/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py
index ef1c0a5..559aa46 100644
--- a/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py
+++ b/airflow/migrations/versions/e959f08ac86c_change_field_in_dagcode_to_mediumtext_.py
@@ -33,15 +33,15 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
-    conn = op.get_bind()  # pylint: disable=no-member
+def upgrade():
+    conn = op.get_bind()
     if conn.dialect.name == "mysql":
         op.alter_column(
             table_name='dag_code', column_name='source_code', type_=mysql.MEDIUMTEXT, nullable=False
         )
 
 
-def downgrade():  # noqa: D103
-    conn = op.get_bind()  # pylint: disable=no-member
+def downgrade():
+    conn = op.get_bind()
     if conn.dialect.name == "mysql":
         op.alter_column(table_name='dag_code', column_name='source_code', type_=mysql.TEXT, nullable=False)
diff --git a/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py b/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py
index 7a0a3c8..77424ec 100644
--- a/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py
+++ b/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py
@@ -32,7 +32,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     conn = op.get_bind()
     if conn.dialect.name == 'mysql':
         conn.execute("SET time_zone = '+00:00'")
@@ -41,7 +41,7 @@ def upgrade():  # noqa: D103
         op.alter_column('xcom', 'timestamp', existing_type=mysql.TIMESTAMP(fsp=6), nullable=False)
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     conn = op.get_bind()
     if conn.dialect.name == 'mysql':
         conn.execute("SET time_zone = '+00:00'")
diff --git a/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py b/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py
index 1db0440..bfc191a 100644
--- a/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py
+++ b/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py
@@ -33,7 +33,7 @@ branch_labels = None
 depends_on = None
 
 
-def upgrade():  # noqa: D103
+def upgrade():
     op.create_table(
         'dag_stats',
         sa.Column('dag_id', sa.String(length=250), nullable=False),
@@ -44,5 +44,5 @@ def upgrade():  # noqa: D103
     )
 
 
-def downgrade():  # noqa: D103
+def downgrade():
     op.drop_table('dag_stats')
diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py
index 61606ea..cd6e795 100644
--- a/airflow/models/__init__.py
+++ b/airflow/models/__init__.py
@@ -23,7 +23,7 @@ from airflow.models.dag import DAG, DagModel, DagTag
 from airflow.models.dagbag import DagBag
 from airflow.models.dagpickle import DagPickle
 from airflow.models.dagrun import DagRun
-from airflow.models.errors import ImportError  # pylint: disable=redefined-builtin
+from airflow.models.errors import ImportError
 from airflow.models.log import Log
 from airflow.models.pool import Pool
 from airflow.models.renderedtifields import RenderedTaskInstanceFields
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index 7af23d3..10e8bfd 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -74,13 +74,13 @@ from airflow.utils.trigger_rule import TriggerRule
 from airflow.utils.weight_rule import WeightRule
 
 if TYPE_CHECKING:
-    from airflow.utils.task_group import TaskGroup  # pylint: disable=cyclic-import
+    from airflow.utils.task_group import TaskGroup
 
 ScheduleInterval = Union[str, timedelta, relativedelta]
 
 TaskStateChangeCallback = Callable[[Context], None]
 
-T = TypeVar('T', bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar('T', bound=Callable)
 
 
 class BaseOperatorMeta(abc.ABCMeta):
@@ -110,7 +110,6 @@ class BaseOperatorMeta(abc.ABCMeta):
             and param.kind not in (param.VAR_POSITIONAL, param.VAR_KEYWORD)
         }
 
-        # pylint: disable=invalid-name,missing-docstring
         class autostacklevel_warn:
             def __init__(self):
                 self.warnings = __import__('warnings')
@@ -124,8 +123,6 @@ class BaseOperatorMeta(abc.ABCMeta):
             def warn(self, message, category=None, stacklevel=1, source=None):
                 self.warnings.warn(message, category, stacklevel + 2, source)
 
-        # pylint: enable=invalid-name,missing-docstring
-
         if func.__globals__.get('warnings') is sys.modules['warnings']:
             # Yes, this is slightly hacky, but it _automatically_ sets the right
             # stacklevel parameter to `warnings.warn` to ignore the decorator. Now
@@ -176,7 +173,7 @@ class BaseOperatorMeta(abc.ABCMeta):
                 kwargs['default_args'] = default_args
 
             if hasattr(self, '_hook_apply_defaults'):
-                args, kwargs = self._hook_apply_defaults(*args, **kwargs)  # pylint: disable=protected-access
+                args, kwargs = self._hook_apply_defaults(*args, **kwargs)
 
             result = func(self, *args, **kwargs)
 
@@ -184,7 +181,7 @@ class BaseOperatorMeta(abc.ABCMeta):
             self.set_xcomargs_dependencies()
 
             # Mark instance as instantiated https://docs.python.org/3/tutorial/classes.html#private-variables
-            self._BaseOperator__instantiated = True  # pylint: disable=protected-access
+            self._BaseOperator__instantiated = True
             return result
 
         return cast(T, apply_defaults)
@@ -195,7 +192,6 @@ class BaseOperatorMeta(abc.ABCMeta):
         return new_cls
 
 
-# pylint: disable=too-many-instance-attributes,too-many-public-methods
 @functools.total_ordering
 class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta):
     """
@@ -456,7 +452,6 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
     # Set to True before calling execute method
     _lock_for_execution = False
 
-    # pylint: disable=too-many-arguments,too-many-locals, too-many-statements
     def __init__(
         self,
         task_id: str,
@@ -474,7 +469,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
         wait_for_downstream: bool = False,
         dag=None,
         params: Optional[Dict] = None,
-        default_args: Optional[Dict] = None,  # pylint: disable=unused-argument
+        default_args: Optional[Dict] = None,
         priority_weight: int = 1,
         weight_rule: str = WeightRule.DOWNSTREAM,
         queue: str = conf.get('operators', 'default_queue'),
@@ -584,7 +579,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
             self.retry_delay = retry_delay
         else:
             self.log.debug("Retry_delay isn't timedelta object, assuming secs")
-            self.retry_delay = timedelta(seconds=retry_delay)  # noqa
+            self.retry_delay = timedelta(seconds=retry_delay)
         self.retry_exponential_backoff = retry_exponential_backoff
         self.max_retry_delay = max_retry_delay
         if max_retry_delay:
@@ -592,7 +587,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
                 self.max_retry_delay = max_retry_delay
             else:
                 self.log.debug("Max_retry_delay isn't timedelta object, assuming secs")
-                self.max_retry_delay = timedelta(seconds=max_retry_delay)  # noqa
+                self.max_retry_delay = timedelta(seconds=max_retry_delay)
 
         self.params = params or {}  # Available in templates!
         self.priority_weight = priority_weight
@@ -819,7 +814,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
         returns a copy of the task
         """
         other = copy.copy(self)
-        other._lock_for_execution = True  # pylint: disable=protected-access
+        other._lock_for_execution = True
         return other
 
     def set_xcomargs_dependencies(self) -> None:
@@ -845,7 +840,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
         """
         from airflow.models.xcom_arg import XComArg
 
-        def apply_set_upstream(arg: Any):  # noqa
+        def apply_set_upstream(arg: Any):
             if isinstance(arg, XComArg):
                 self.set_upstream(arg.operator)
             elif isinstance(arg, (tuple, set, list)):
@@ -963,13 +958,11 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
         result = cls.__new__(cls)
         memo[id(self)] = result
 
-        shallow_copy = (
-            cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs
-        )  # pylint: disable=protected-access
+        shallow_copy = cls.shallow_copy_attrs + cls._base_operator_shallow_copy_attrs
 
         for k, v in self.__dict__.items():
             if k not in shallow_copy:
-                setattr(result, k, copy.deepcopy(v, memo))  # noqa
+                setattr(result, k, copy.deepcopy(v, memo))
             else:
                 setattr(result, k, copy.copy(v))
         return result
@@ -981,7 +974,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
         return state
 
     def __setstate__(self, state):
-        self.__dict__ = state  # pylint: disable=attribute-defined-outside-init
+        self.__dict__ = state
         self._log = logging.getLogger("airflow.task.operators")
 
     def render_template_fields(self, context: Dict, jinja_env: Optional[jinja2.Environment] = None) -> None:
@@ -1012,7 +1005,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
                 rendered_content = self.render_template(content, context, jinja_env, seen_oids)
                 setattr(parent, attr_name, rendered_content)
 
-    def render_template(  # pylint: disable=too-many-return-statements
+    def render_template(
         self,
         content: Any,
         context: Dict,
@@ -1051,10 +1044,10 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
             return content.resolve(context)
 
         if isinstance(content, tuple):
-            if type(content) is not tuple:  # pylint: disable=unidiomatic-typecheck
+            if type(content) is not tuple:
                 # Special case for named tuples
                 return content.__class__(
-                    *(self.render_template(element, context, jinja_env) for element in content)  # noqa
+                    *(self.render_template(element, context, jinja_env) for element in content)
                 )
             else:
                 return tuple(self.render_template(element, context, jinja_env) for element in content)
@@ -1093,7 +1086,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
             self.dag.get_template_env()
             if self.has_dag()
             else airflow.templates.SandboxedEnvironment(cache_size=0)
-        )  # noqa
+        )
 
     def prepare_template(self) -> None:
         """
@@ -1105,26 +1098,26 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
 
     def resolve_template_files(self) -> None:
         """Getting the content of files for template_field / template_ext"""
-        if self.template_ext:  # pylint: disable=too-many-nested-blocks
+        if self.template_ext:
             for field in self.template_fields:
                 content = getattr(self, field, None)
-                if content is None:  # pylint: disable=no-else-continue
+                if content is None:
                     continue
                 elif isinstance(content, str) and any(content.endswith(ext) for ext in self.template_ext):
                     env = self.get_template_env()
                     try:
                         setattr(self, field, env.loader.get_source(env, content)[0])
-                    except Exception as e:  # pylint: disable=broad-except
+                    except Exception as e:
                         self.log.exception(e)
                 elif isinstance(content, list):
                     env = self.dag.get_template_env()
-                    for i in range(len(content)):  # pylint: disable=consider-using-enumerate
+                    for i in range(len(content)):
                         if isinstance(content[i], str) and any(
                             content[i].endswith(ext) for ext in self.template_ext
                         ):
                             try:
                                 content[i] = env.loader.get_source(env, content[i])[0]
-                            except Exception as e:  # pylint: disable=broad-except
+                            except Exception as e:
                                 self.log.exception(e)
         self.prepare_template()
 
@@ -1334,9 +1327,7 @@ class BaseOperator(Operator, LoggingMixin, TaskMixin, metaclass=BaseOperatorMeta
         # relationships can only be set if the tasks share a single DAG. Tasks
         # without a DAG are assigned to that DAG.
         dags = {
-            task._dag.dag_id: task._dag  # type: ignore  # pylint: disable=protected-access,no-member
-            for task in self.roots + task_list
-            if task.has_dag()  # pylint: disable=no-member
+            task._dag.dag_id: task._dag for task in self.roots + task_list if task.has_dag()  # type: ignore
         }
 
         if len(dags) > 1:
@@ -1643,7 +1634,7 @@ def cross_downstream(
 class BaseOperatorLink(metaclass=ABCMeta):
     """Abstract base class that defines how we get an operator link."""
 
-    operators: ClassVar[List[Type[BaseOperator]]] = []  # pylint: disable=invalid-name
+    operators: ClassVar[List[Type[BaseOperator]]] = []
     """
     This property will be used by Airflow Plugins to find the Operators to which you want
     to assign this Operator Link
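
Several of the baseoperator.py hunks touch the autostacklevel_warn shim. Its job is purely ergonomic: warnings raised inside apply_defaults should point at the user's DAG file, not at the metaclass machinery, and the mechanism underneath is nothing more than the stacklevel argument to warnings.warn. A standalone illustration:

    import warnings

    def deprecated_api():
        # stacklevel=2 attributes the warning to our caller rather than to
        # this frame; the shim in BaseOperatorMeta adds 2 on top of whatever
        # the operator author passed, to skip the decorator frames.
        warnings.warn("deprecated_api() is deprecated", DeprecationWarning, stacklevel=2)

    deprecated_api()  # the warning is reported on this line
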
diff --git a/airflow/models/connection.py b/airflow/models/connection.py
index 73d0d8d..ed80741 100644
--- a/airflow/models/connection.py
+++ b/airflow/models/connection.py
@@ -57,7 +57,7 @@ def _parse_netloc_to_hostname(uri_parts):
     return hostname
 
 
-class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attributes
+class Connection(Base, LoggingMixin):
     """
     Placeholder to store connection information for different database
     instances. The idea here is that scripts use references to
@@ -107,7 +107,7 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
     is_extra_encrypted = Column(Boolean, unique=False, default=False)
     _extra = Column('extra', Text())
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         conn_id: Optional[str] = None,
         conn_type: Optional[str] = None,
@@ -125,9 +125,7 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
         self.description = description
         if extra and not isinstance(extra, str):
             extra = json.dumps(extra)
-        if uri and (  # pylint: disable=too-many-boolean-expressions
-            conn_type or host or login or password or schema or port or extra
-        ):
+        if uri and (conn_type or host or login or password or schema or port or extra):
             raise AirflowException(
                 "You must create an object using the URI or individual values "
                 "(conn_type, host, login, password, schema, port or extra)."
@@ -148,7 +146,7 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
             mask_secret(self.password)
 
     @reconstructor
-    def on_db_load(self):  # pylint: disable=missing-function-docstring
+    def on_db_load(self):
         if self.password:
             mask_secret(self.password)
 
@@ -246,7 +244,7 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
             self.is_encrypted = fernet.is_encrypted
 
     @declared_attr
-    def password(cls):  # pylint: disable=no-self-argument
+    def password(cls):
         """Password. The value is decrypted/encrypted when reading/setting the value."""
         return synonym('_password', descriptor=property(cls.get_password, cls.set_password))
 
@@ -276,7 +274,7 @@ class Connection(Base, LoggingMixin):  # pylint: disable=too-many-instance-attri
             self.is_extra_encrypted = False
 
     @declared_attr
-    def extra(cls):  # pylint: disable=no-self-argument
+    def extra(cls):
         """Extra data. The value is decrypted/encrypted when reading/setting the value."""
         return synonym('_extra', descriptor=property(cls.get_extra, cls.set_extra))
 
diff --git a/airflow/models/crypto.py b/airflow/models/crypto.py
index d6e0ee8..b57c537 100644
--- a/airflow/models/crypto.py
+++ b/airflow/models/crypto.py
@@ -28,7 +28,7 @@ log = logging.getLogger(__name__)
 
 
 class FernetProtocol(Protocol):
-    """This class is only used for TypeChecking (for IDEs, mypy, pylint, etc)"""
+    """This class is only used for TypeChecking (for IDEs, mypy, etc)"""
 
     def decrypt(self, b):
         """Decrypt with Fernet"""
@@ -71,7 +71,7 @@ def get_fernet():
     :return: Fernet object
     :raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet
     """
-    global _fernet  # pylint: disable=global-statement
+    global _fernet
 
     if _fernet:
         return _fernet
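
The password/extra pairs in connection.py (and val in variable.py below) combine crypto.py's Fernet helper with the declared_attr + synonym pattern: the encrypted bytes live in a private column, and a property pair encrypts and decrypts transparently on attribute access. A self-contained sketch with base64 standing in for Fernet (the model and column names are illustrative):

    import base64
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import synonym

    Base = declarative_base()

    class Account(Base):
        __tablename__ = 'account'
        id = Column(Integer, primary_key=True)
        _secret = Column('secret', String(500))

        def get_secret(self):
            # stand-in for Fernet decryption
            return base64.b64decode(self._secret).decode() if self._secret else None

        def set_secret(self, value):
            # stand-in for Fernet encryption
            self._secret = base64.b64encode(value.encode()).decode()

        secret = synonym('_secret', descriptor=property(get_secret, set_secret))

Airflow builds the synonym inside @declared_attr so the descriptor is constructed when the mapped class is; the plain form above is enough for a single class.
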
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 53c3fea..1861eda 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -1872,7 +1872,7 @@ class DAG(LoggingMixin):
             session.query(DagRun.dag_id, func.count('*'))
             .filter(
                 DagRun.dag_id.in_(existing_dag_ids),
-                DagRun.state == State.RUNNING,  # pylint: disable=comparison-with-callable
+                DagRun.state == State.RUNNING,
                 DagRun.external_trigger.is_(False),
             )
             .group_by(DagRun.dag_id)
diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py
index ae21b90..5d84ba2 100644
--- a/airflow/models/dagbag.py
+++ b/airflow/models/dagbag.py
@@ -286,7 +286,7 @@ class DagBag(LoggingMixin):
                 and file_last_changed_on_disk == self.file_last_changed[filepath]
             ):
                 return []
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             self.log.exception(e)
             return []
 
@@ -325,7 +325,7 @@ class DagBag(LoggingMixin):
                 sys.modules[spec.name] = new_module
                 loader.exec_module(new_module)
                 return [new_module]
-            except Exception as e:  # pylint: disable=broad-except
+            except Exception as e:
                 self.log.exception("Failed to import: %s", filepath)
                 if self.dagbag_import_error_tracebacks:
                     self.import_errors[filepath] = traceback.format_exc(
@@ -368,7 +368,7 @@ class DagBag(LoggingMixin):
                     sys.path.insert(0, filepath)
                     current_module = importlib.import_module(mod_name)
                     mods.append(current_module)
-                except Exception as e:  # pylint: disable=broad-except
+                except Exception as e:
                     self.log.exception("Failed to import: %s", filepath)
                     if self.dagbag_import_error_tracebacks:
                         self.import_errors[filepath] = traceback.format_exc(
@@ -520,7 +520,7 @@ class DagBag(LoggingMixin):
                         dags=str([dag.dag_id for dag in found_dags]),
                     )
                 )
-            except Exception as e:  # pylint: disable=broad-except
+            except Exception as e:
                 self.log.exception(e)
 
         self.dagbag_stats = sorted(stats, key=lambda x: x.duration, reverse=True)
@@ -594,7 +594,7 @@ class DagBag(LoggingMixin):
                 return []
             except OperationalError:
                 raise
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 return [(dag.fileloc, traceback.format_exc(limit=-self.dagbag_import_error_traceback_depth))]
 
         # Retry 'DAG.bulk_write_to_db' & 'SerializedDagModel.bulk_sync_to_db' in case
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index 6bd96f3..3bd86ee 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -701,7 +701,7 @@ class DagRun(Base, LoggingMixin):
             session.query(DagRun)
             .filter(
                 DagRun.dag_id == dag_id,
-                DagRun.external_trigger == False,  # noqa pylint: disable=singleton-comparison
+                DagRun.external_trigger == False,  # noqa
                 DagRun.execution_date == execution_date,
             )
             .first()
diff --git a/airflow/models/errors.py b/airflow/models/errors.py
index 47f5bd7..9718c06 100644
--- a/airflow/models/errors.py
+++ b/airflow/models/errors.py
@@ -22,7 +22,7 @@ from airflow.models.base import Base
 from airflow.utils.sqlalchemy import UtcDateTime
 
 
-class ImportError(Base):  # pylint: disable=redefined-builtin
+class ImportError(Base):
     """
     A table to store all Import Errors. The ImportErrors are recorded when parsing DAGs.
     These errors are displayed on the Webserver.
diff --git a/airflow/models/pool.py b/airflow/models/pool.py
index 3d152ee..6f217c4 100644
--- a/airflow/models/pool.py
+++ b/airflow/models/pool.py
@@ -53,7 +53,7 @@ class Pool(Base):
     DEFAULT_POOL_NAME = 'default_pool'
 
     def __repr__(self):
-        return str(self.pool)  # pylint: disable=E0012
+        return str(self.pool)
 
     @staticmethod
     @provide_session
diff --git a/airflow/models/serialized_dag.py b/airflow/models/serialized_dag.py
index bba58ad..e7b5157 100644
--- a/airflow/models/serialized_dag.py
+++ b/airflow/models/serialized_dag.py
@@ -169,23 +169,21 @@ class SerializedDagModel(Base):
     @property
     def dag(self):
         """The DAG deserialized from the ``data`` column"""
-        SerializedDAG._load_operator_extra_links = self.load_op_links  # pylint: disable=protected-access
+        SerializedDAG._load_operator_extra_links = self.load_op_links
 
         if isinstance(self.data, dict):
             dag = SerializedDAG.from_dict(self.data)  # type: Any
         else:
-            dag = SerializedDAG.from_json(self.data)  # noqa
+            dag = SerializedDAG.from_json(self.data)
         return dag
 
     @classmethod
     @provide_session
     def remove_dag(cls, dag_id: str, session: Session = None):
         """Deletes a DAG with given dag_id.
-
         :param dag_id: dag_id to be deleted
         :param session: ORM Session
         """
-        # pylint: disable=no-member
         session.execute(cls.__table__.delete().where(cls.dag_id == dag_id))
 
     @classmethod
@@ -202,7 +200,6 @@ class SerializedDagModel(Base):
             "Deleting Serialized DAGs (for which DAG files are deleted) from %s table ", cls.__tablename__
         )
 
-        # pylint: disable=no-member
         session.execute(
             cls.__table__.delete().where(
                 and_(cls.fileloc_hash.notin_(alive_fileloc_hashes), cls.fileloc.notin_(alive_dag_filelocs))
diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py
index dc40329..489da52 100644
--- a/airflow/models/skipmixin.py
+++ b/airflow/models/skipmixin.py
@@ -96,7 +96,7 @@ class SkipMixin(LoggingMixin):
 
         # SkipMixin may not necessarily have a task_id attribute. Only store to XCom if one is available.
         try:
-            task_id = self.task_id  # noqa
+            task_id = self.task_id
         except AttributeError:
             task_id = None
 
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index 2def707..76cb079 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -116,7 +116,7 @@ def load_error_file(fd: IO[bytes]) -> Optional[Union[str, Exception]]:
         return None
     try:
         return pickle.loads(data)
-    except Exception:  # pylint: disable=broad-except
+    except Exception:
         return "Failed to load task run error"
 
 
@@ -125,7 +125,7 @@ def set_error_file(error_file: str, error: Union[str, Exception]) -> None:
     with open(error_file, "wb") as fd:
         try:
             pickle.dump(error, fd)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             # local class objects cannot be pickled, so we fallback
             # to store the string representation instead
             pickle.dump(str(error), fd)
@@ -205,7 +205,7 @@ def clear_task_instances(
     if job_ids:
         from airflow.jobs.base_job import BaseJob
 
-        for job in session.query(BaseJob).filter(BaseJob.id.in_(job_ids)).all():  # noqa
+        for job in session.query(BaseJob).filter(BaseJob.id.in_(job_ids)).all():
             job.state = State.SHUTDOWN
 
     if activate_dag_runs is not None:
@@ -263,7 +263,7 @@ class TaskInstanceKey(NamedTuple):
         return TaskInstanceKey(self.dag_id, self.task_id, self.execution_date, try_number)
 
 
-class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
+class TaskInstance(Base, LoggingMixin):
     """
     Task instances store the state of a task instance. This table is the
     authority and single source of truth around what tasks have run and the
@@ -400,7 +400,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         """Setting Next Try Number"""
         return self._try_number + 1
 
-    def command_as_list(  # pylint: disable=too-many-arguments
+    def command_as_list(
         self,
         mark_success=False,
         ignore_all_deps=False,
@@ -449,7 +449,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
 
     @staticmethod
     def generate_command(
-        dag_id: str,  # pylint: disable=too-many-arguments
+        dag_id: str,
         task_id: str,
         execution_date: datetime,
         mark_success: bool = False,
@@ -631,7 +631,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
             self.state = ti.state
             # Get the raw value of try_number column, don't read through the
             # accessor here otherwise it will be incremented by one already.
-            self.try_number = ti._try_number  # noqa pylint: disable=protected-access
+            self.try_number = ti._try_number
             self.max_tries = ti.max_tries
             self.hostname = ti.hostname
             self.unixname = ti.unixname
@@ -932,7 +932,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
             ti_hash = int(
                 hashlib.sha1(
                     "{}#{}#{}#{}".format(
-                        self.dag_id, self.task_id, self.execution_date, self.try_number  # noqa
+                        self.dag_id, self.task_id, self.execution_date, self.try_number
                     ).encode('utf-8')
                 ).hexdigest(),
                 16,
@@ -976,7 +976,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         return dr
 
     @provide_session
-    def check_and_change_state_before_execution(  # pylint: disable=too-many-arguments
+    def check_and_change_state_before_execution(
         self,
         verbose: bool = True,
         ignore_all_deps: bool = False,
@@ -1280,7 +1280,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         task_copy = task.prepare_for_execution()
         self.task = task_copy
 
-        def signal_handler(signum, frame):  # pylint: disable=unused-argument
+        def signal_handler(signum, frame):
             self.log.error("Received SIGTERM. Terminating subprocesses.")
             task_copy.on_kill()
             raise AirflowException("Task received SIGTERM signal")
@@ -1315,7 +1315,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
                 registered = False
                 try:
                     registered = task_copy.register_in_sensor_service(self, context)
-                except Exception:  # pylint: disable=broad-except
+                except Exception:
                     self.log.warning(
                         "Failed to register in sensor service."
                         " Continue to run task in non smart sensor mode.",
@@ -1369,7 +1369,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         try:
             if task.on_execute_callback:
                 task.on_execute_callback(context)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self.log.exception("Failed when executing execute callback")
 
     def _run_finished_callback(self, error: Optional[Union[str, Exception]] = None) -> None:
@@ -1398,7 +1398,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
                 task.on_retry_callback(context)
 
     @provide_session
-    def run(  # pylint: disable=too-many-arguments
+    def run(
         self,
         verbose: bool = True,
         ignore_all_deps: bool = False,
@@ -1553,7 +1553,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         if email_for_state and task.email:
             try:
                 self.email_alert(error)
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self.log.exception('Failed to send email to: %s', task.email)
 
         if not test_mode:
@@ -1582,7 +1582,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         return ''
 
     @provide_session
-    def get_template_context(self, session=None) -> Context:  # pylint: disable=too-many-locals
+    def get_template_context(self, session=None) -> Context:
         """Return TI Context"""
         task = self.task
         from airflow import macros
@@ -1673,8 +1673,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
             @staticmethod
             def get(
                 item: str,
-                # pylint: disable=protected-access
-                default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL,  # noqa
+                default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL,
             ):
                 """Get Airflow Variable value"""
                 return Variable.get(item, default_var=default_var)
@@ -1702,8 +1701,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
             @staticmethod
             def get(
                 item: str,
-                # pylint: disable=protected-access
-                default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL,  # noqa
+                default_var: Any = Variable._Variable__NO_DEFAULT_SENTINEL,
             ):
                 """Get Airflow Variable after deserializing JSON value"""
                 return Variable.get(item, default_var=default_var, deserialize_json=True)
@@ -1946,7 +1944,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
         )
 
     @provide_session
-    def xcom_pull(  # pylint: disable=inconsistent-return-statements
+    def xcom_pull(
         self,
         task_ids: Optional[Union[str, Iterable[str]]] = None,
         dag_id: Optional[str] = None,
@@ -2099,7 +2097,6 @@ class SimpleTaskInstance:
         self._queue: str = ti.queue
         self._key = ti.key
 
-    # pylint: disable=missing-docstring
     @property
     def dag_id(self) -> str:
         return self._dag_id
diff --git a/airflow/models/variable.py b/airflow/models/variable.py
index 44627c0..7d47269 100644
--- a/airflow/models/variable.py
+++ b/airflow/models/variable.py
@@ -58,7 +58,7 @@ class Variable(Base, LoggingMixin):
         self.description = description
 
     @reconstructor
-    def on_db_load(self):  # pylint: disable=missing-function-docstring
+    def on_db_load(self):
         if self._val:
             mask_secret(self.val, self.key)
 
@@ -75,7 +75,7 @@ class Variable(Base, LoggingMixin):
             except InvalidFernetToken:
                 self.log.error("Can't decrypt _val for key=%s, invalid token or value", self.key)
                 return None
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self.log.error("Can't decrypt _val for key=%s, FERNET_KEY configuration missing", self.key)
                 return None
         else:
@@ -89,7 +89,7 @@ class Variable(Base, LoggingMixin):
             self.is_encrypted = fernet.is_encrypted
 
     @declared_attr
-    def val(cls):  # pylint: disable=no-self-argument
+    def val(cls):
         """Get Airflow Variable from Metadata DB and decode it using the Fernet Key"""
         return synonym('_val', descriptor=property(cls.get_val, cls.set_val))
 
diff --git a/airflow/models/xcom_arg.py b/airflow/models/xcom_arg.py
index eae583e..dd08ab3 100644
--- a/airflow/models/xcom_arg.py
+++ b/airflow/models/xcom_arg.py
@@ -18,7 +18,7 @@
 from typing import Any, Dict, List, Optional, Sequence, Union
 
 from airflow.exceptions import AirflowException
-from airflow.models.baseoperator import BaseOperator  # pylint: disable=R0401
+from airflow.models.baseoperator import BaseOperator
 from airflow.models.taskmixin import TaskMixin
 from airflow.models.xcom import XCOM_RETURN_KEY
 from airflow.utils.edgemodifier import EdgeModifier
diff --git a/airflow/mypy/plugin/decorators.py b/airflow/mypy/plugin/decorators.py
index 3267ca1..192dc37 100644
--- a/airflow/mypy/plugin/decorators.py
+++ b/airflow/mypy/plugin/decorators.py
@@ -20,9 +20,9 @@ import copy
 import functools
 from typing import List
 
-from mypy.nodes import ARG_NAMED_OPT  # pylint: disable=no-name-in-module
-from mypy.plugin import FunctionContext, Plugin  # pylint: disable=no-name-in-module
-from mypy.types import CallableType, NoneType, UnionType  # pylint: disable=no-name-in-module
+from mypy.nodes import ARG_NAMED_OPT
+from mypy.plugin import FunctionContext, Plugin
+from mypy.types import CallableType, NoneType, UnionType
 
 TYPED_DECORATORS = {
     "fallback_to_default_project_id of GoogleBaseHook": ["project_id"],
@@ -75,6 +75,6 @@ def _change_decorator_function_type(
     return decorator
 
 
-def plugin(version: str):  # pylint: disable=unused-argument
+def plugin(version: str):
     """Mypy plugin entrypoint."""
     return TypedDecoratorPlugin
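
For context on the entrypoint above: mypy discovers a plugin by importing the module named in its configuration and calling that module's `plugin(version)` function, which must return a Plugin subclass (the class itself, not an instance). A minimal self-contained sketch, independent of Airflow's TypedDecoratorPlugin:

    # minimal_mypy_plugin.py -- a no-op mypy plugin, for illustration only
    from mypy.plugin import Plugin

    class NoopPlugin(Plugin):
        """Hooks nothing; mypy falls back to its default behavior."""

    def plugin(version: str):
        # mypy passes its own version string here; it can be used to pick
        # a different plugin class per mypy release. Return the class.
        return NoopPlugin

It would be enabled with a line like "plugins = minimal_mypy_plugin" in mypy.ini (module path assumed for the sketch).
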
diff --git a/airflow/operators/bash_operator.py b/airflow/operators/bash_operator.py
index 8c389fe..3b7764d 100644
--- a/airflow/operators/bash_operator.py
+++ b/airflow/operators/bash_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.bash import BashOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/branch_operator.py b/airflow/operators/branch_operator.py
index 218e26e..b4c71d5 100644
--- a/airflow/operators/branch_operator.py
+++ b/airflow/operators/branch_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.branch import BaseBranchOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/dagrun_operator.py b/airflow/operators/dagrun_operator.py
index 0f4990f..bdcc667 100644
--- a/airflow/operators/dagrun_operator.py
+++ b/airflow/operators/dagrun_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.trigger_dagrun import TriggerDagRunLink, TriggerDagRunOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py
index e815d66..88235b4 100644
--- a/airflow/operators/docker_operator.py
+++ b/airflow/operators/docker_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.docker.operators.docker import DockerOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/druid_check_operator.py b/airflow/operators/druid_check_operator.py
index 5e605e5..008a917 100644
--- a/airflow/operators/druid_check_operator.py
+++ b/airflow/operators/druid_check_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.druid.operators.druid_check import DruidCheckOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/dummy_operator.py b/airflow/operators/dummy_operator.py
index c166c7f..6b2fbee 100644
--- a/airflow/operators/dummy_operator.py
+++ b/airflow/operators/dummy_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.dummy import DummyOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/email.py b/airflow/operators/email.py
index 69807f2..324ec1a 100644
--- a/airflow/operators/email.py
+++ b/airflow/operators/email.py
@@ -50,7 +50,7 @@ class EmailOperator(BaseOperator):
     template_ext = ('.html',)
     ui_color = '#e6faf9'
 
-    def __init__(  # pylint: disable=invalid-name
+    def __init__(
         self,
         *,
         to: Union[List[str], str],
@@ -65,11 +65,11 @@ class EmailOperator(BaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
-        self.to = to  # pylint: disable=invalid-name
+        self.to = to
         self.subject = subject
         self.html_content = html_content
         self.files = files or []
-        self.cc = cc  # pylint: disable=invalid-name
+        self.cc = cc
         self.bcc = bcc
         self.mime_subtype = mime_subtype
         self.mime_charset = mime_charset
diff --git a/airflow/operators/email_operator.py b/airflow/operators/email_operator.py
index d1cf2ea..80901d0 100644
--- a/airflow/operators/email_operator.py
+++ b/airflow/operators/email_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.email import EmailOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/gcs_to_s3.py b/airflow/operators/gcs_to_s3.py
index ed709b2..d02bc7f 100644
--- a/airflow/operators/gcs_to_s3.py
+++ b/airflow/operators/gcs_to_s3.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/hive_operator.py b/airflow/operators/hive_operator.py
index 545b66f..b49cf09 100644
--- a/airflow/operators/hive_operator.py
+++ b/airflow/operators/hive_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hive.operators.hive import HiveOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/hive_stats_operator.py b/airflow/operators/hive_stats_operator.py
index 96d7b52..af1e260 100644
--- a/airflow/operators/hive_stats_operator.py
+++ b/airflow/operators/hive_stats_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py
index 6c40c90..a6537a1 100644
--- a/airflow/operators/hive_to_druid.py
+++ b/airflow/operators/hive_to_druid.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.apache.druid.transfers.hive_to_druid`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
 
 warnings.warn(
diff --git a/airflow/operators/hive_to_mysql.py b/airflow/operators/hive_to_mysql.py
index 4ce9c82..0a13c76 100644
--- a/airflow/operators/hive_to_mysql.py
+++ b/airflow/operators/hive_to_mysql.py
@@ -22,7 +22,6 @@ Please use `airflow.providers.apache.hive.transfers.hive_to_mysql`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
 
 warnings.warn(
diff --git a/airflow/operators/hive_to_samba_operator.py b/airflow/operators/hive_to_samba_operator.py
index b696912..ed3b180 100644
--- a/airflow/operators/hive_to_samba_operator.py
+++ b/airflow/operators/hive_to_samba_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/http_operator.py b/airflow/operators/http_operator.py
index 6c75520..6e2ab56 100644
--- a/airflow/operators/http_operator.py
+++ b/airflow/operators/http_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.http.operators.http import SimpleHttpOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/jdbc_operator.py b/airflow/operators/jdbc_operator.py
index f739157..ff36f9f 100644
--- a/airflow/operators/jdbc_operator.py
+++ b/airflow/operators/jdbc_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.jdbc.operators.jdbc import JdbcOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/latest_only_operator.py b/airflow/operators/latest_only_operator.py
index 42e0f93..07644f4 100644
--- a/airflow/operators/latest_only_operator.py
+++ b/airflow/operators/latest_only_operator.py
@@ -18,7 +18,6 @@
 """This module is deprecated. Please use :mod:`airflow.operators.latest_only`"""
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.latest_only import LatestOnlyOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/mssql_operator.py b/airflow/operators/mssql_operator.py
index 1bdd2c5..d1047b8 100644
--- a/airflow/operators/mssql_operator.py
+++ b/airflow/operators/mssql_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.microsoft.mssql.operators.mssql import MsSqlOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/mysql_operator.py b/airflow/operators/mysql_operator.py
index c3b4a52..82a94ed 100644
--- a/airflow/operators/mysql_operator.py
+++ b/airflow/operators/mysql_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.mysql.operators.mysql import MySqlOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/oracle_operator.py b/airflow/operators/oracle_operator.py
index 059e723..8ad61db 100644
--- a/airflow/operators/oracle_operator.py
+++ b/airflow/operators/oracle_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.oracle.operators.oracle import OracleOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/papermill_operator.py b/airflow/operators/papermill_operator.py
index 61ee255..5d63e38 100644
--- a/airflow/operators/papermill_operator.py
+++ b/airflow/operators/papermill_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.papermill.operators.papermill import PapermillOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/pig_operator.py b/airflow/operators/pig_operator.py
index ff2659e..3b2ea0e 100644
--- a/airflow/operators/pig_operator.py
+++ b/airflow/operators/pig_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.apache.pig.operators.pig import PigOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/postgres_operator.py b/airflow/operators/postgres_operator.py
index 949ac06..e5dc53c 100644
--- a/airflow/operators/postgres_operator.py
+++ b/airflow/operators/postgres_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.postgres.operators.postgres import Mapping, PostgresOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/presto_check_operator.py b/airflow/operators/presto_check_operator.py
index b573162..693471f 100644
--- a/airflow/operators/presto_check_operator.py
+++ b/airflow/operators/presto_check_operator.py
@@ -19,8 +19,7 @@
 
 import warnings
 
-# pylint: disable=unused-import
-from airflow.operators.sql import SQLCheckOperator, SQLIntervalCheckOperator, SQLValueCheckOperator  # noqa
+from airflow.operators.sql import SQLCheckOperator, SQLIntervalCheckOperator, SQLValueCheckOperator
 
 warnings.warn(
     "This module is deprecated. Please use `airflow.operators.sql`.", DeprecationWarning, stacklevel=2
diff --git a/airflow/operators/presto_to_mysql.py b/airflow/operators/presto_to_mysql.py
index fba566a..bfc1173 100644
--- a/airflow/operators/presto_to_mysql.py
+++ b/airflow/operators/presto_to_mysql.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.mysql.transfers.presto_to_mysql`.
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator
 
 warnings.warn(
diff --git a/airflow/operators/python.py b/airflow/operators/python.py
index fa8020c..2aea470 100644
--- a/airflow/operators/python.py
+++ b/airflow/operators/python.py
@@ -62,7 +62,7 @@ def task(python_callable: Optional[Callable] = None, multiple_outputs: Optional[
     """
     # To maintain backwards compatibility, we import the task object into this file
     # This prevents breakages in dags that use `from airflow.operators.python import task`
-    from airflow.decorators.python import python_task  # noqa # pylint: disable=unused-import
+    from airflow.decorators.python import python_task
 
     warnings.warn(
         """airflow.operators.python.task is deprecated. Please use the following instead
@@ -295,7 +295,7 @@ class PythonVirtualenvOperator(PythonOperator):
     }
     AIRFLOW_SERIALIZABLE_CONTEXT_KEYS = {'macros', 'conf', 'dag', 'dag_run', 'task'}
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         python_callable: Callable,
diff --git a/airflow/operators/python_operator.py b/airflow/operators/python_operator.py
index 75d9dbb..ac8c644 100644
--- a/airflow/operators/python_operator.py
+++ b/airflow/operators/python_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.python import (  # noqa
     BranchPythonOperator,
     PythonOperator,
diff --git a/airflow/operators/s3_file_transform_operator.py b/airflow/operators/s3_file_transform_operator.py
index a4221de..828031d 100644
--- a/airflow/operators/s3_file_transform_operator.py
+++ b/airflow/operators/s3_file_transform_operator.py
@@ -22,7 +22,6 @@ Please use :mod:`airflow.providers.amazon.aws.operators.s3_file_transform`
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.operators.s3_file_transform import S3FileTransformOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/slack_operator.py b/airflow/operators/slack_operator.py
index 311a4ca..3af49e2 100644
--- a/airflow/operators/slack_operator.py
+++ b/airflow/operators/slack_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.slack.operators.slack import SlackAPIOperator, SlackAPIPostOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/sqlite_operator.py b/airflow/operators/sqlite_operator.py
index 79f7eff..68791d6 100644
--- a/airflow/operators/sqlite_operator.py
+++ b/airflow/operators/sqlite_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.sqlite.operators.sqlite import SqliteOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/operators/subdag_operator.py b/airflow/operators/subdag_operator.py
index 1323faf..bb5a088 100644
--- a/airflow/operators/subdag_operator.py
+++ b/airflow/operators/subdag_operator.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.operators.subdag import SkippedStatePropagationOptions, SubDagOperator  # noqa
 
 warnings.warn(
diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py
index 2eeb985..575a37a 100644
--- a/airflow/plugins_manager.py
+++ b/airflow/plugins_manager.py
@@ -172,7 +172,7 @@ def is_valid_plugin(plugin_obj):
     :return: Whether or not the obj is a valid subclass of
         AirflowPlugin
     """
-    global plugins  # pylint: disable=global-statement
+    global plugins
 
     if (
         inspect.isclass(plugin_obj)
@@ -190,7 +190,7 @@ def register_plugin(plugin_instance):
 
     :param plugin_instance: subclass of AirflowPlugin
     """
-    global plugins  # pylint: disable=global-statement
+    global plugins
     plugin_instance.on_load()
     plugins.append(plugin_instance)
 
@@ -200,7 +200,7 @@ def load_entrypoint_plugins():
     Load and register plugins AirflowPlugin subclasses from the entrypoints.
     The entry_point group should be 'airflow.plugins'.
     """
-    global import_errors  # pylint: disable=global-statement
+    global import_errors
 
     log.debug("Loading plugins from entrypoints")
 
@@ -214,14 +214,14 @@ def load_entrypoint_plugins():
             plugin_instance = plugin_class()
             plugin_instance.source = EntryPointSource(entry_point, dist)
             register_plugin(plugin_instance)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             log.exception("Failed to import plugin %s", entry_point.name)
             import_errors[entry_point.module] = str(e)
 
 
 def load_plugins_from_plugin_directory():
     """Load and register Airflow Plugins from plugins directory"""
-    global import_errors  # pylint: disable=global-statement
+    global import_errors
     log.debug("Loading plugins from directory: %s", settings.PLUGINS_FOLDER)
 
     for file_path in find_path_from_directory(settings.PLUGINS_FOLDER, ".airflowignore"):
@@ -243,12 +243,11 @@ def load_plugins_from_plugin_directory():
                 plugin_instance = mod_attr_value()
                 plugin_instance.source = PluginsDirectorySource(file_path)
                 register_plugin(plugin_instance)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             log.exception('Failed to import plugin %s', file_path)
             import_errors[file_path] = str(e)
 
 
-# pylint: disable=protected-access
 def make_module(name: str, objects: List[Any]):
     """Creates new module."""
     if not objects:
@@ -262,9 +261,6 @@ def make_module(name: str, objects: List[Any]):
     return module
 
 
-# pylint: enable=protected-access
-
-
 def ensure_plugins_loaded():
     """
     Load plugins from plugins directory and entrypoints.
@@ -273,7 +269,7 @@ def ensure_plugins_loaded():
     """
     from airflow.stats import Stats
 
-    global plugins, registered_hooks  # pylint: disable=global-statement
+    global plugins, registered_hooks
 
     if plugins is not None:
         log.debug("Plugins are already loaded. Skipping.")
@@ -303,12 +299,10 @@ def ensure_plugins_loaded():
 
 def initialize_web_ui_plugins():
     """Collect extension points for WEB UI"""
-    # pylint: disable=global-statement
     global plugins
     global flask_blueprints
     global flask_appbuilder_views
     global flask_appbuilder_menu_links
-    # pylint: enable=global-statement
 
     if (
         flask_blueprints is not None
@@ -345,11 +339,9 @@ def initialize_web_ui_plugins():
 
 def initialize_extra_operators_links_plugins():
     """Creates modules for loaded extension from extra operators links plugins"""
-    # pylint: disable=global-statement
     global global_operator_extra_links
     global operator_extra_links
     global registered_operator_link_classes
-    # pylint: enable=global-statement
 
     if (
         global_operator_extra_links is not None
@@ -383,10 +375,8 @@ def initialize_extra_operators_links_plugins():
 
 def integrate_executor_plugins() -> None:
     """Integrate executor plugins to the context."""
-    # pylint: disable=global-statement
     global plugins
     global executors_modules
-    # pylint: enable=global-statement
 
     if executors_modules is not None:
         return
@@ -407,15 +397,14 @@ def integrate_executor_plugins() -> None:
         executors_module = make_module('airflow.executors.' + plugin_name, plugin.executors)
         if executors_module:
             executors_modules.append(executors_module)
-            sys.modules[executors_module.__name__] = executors_module  # pylint: disable=no-member
+            sys.modules[executors_module.__name__] = executors_module
 
 
 def integrate_macros_plugins() -> None:
     """Integrates macro plugins."""
-    # pylint: disable=global-statement
     global plugins
     global macros_modules
-    # pylint: enable=global-statement
+
     from airflow import macros
 
     if macros_modules is not None:
@@ -438,7 +427,7 @@ def integrate_macros_plugins() -> None:
 
         if macros_module:
             macros_modules.append(macros_module)
-            sys.modules[macros_module.__name__] = macros_module  # pylint: disable=no-member
+            sys.modules[macros_module.__name__] = macros_module
             # Register the newly created module on airflow.macros such that it
             # can be accessed when rendering templates.
             setattr(macros, plugin.name, macros_module)
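
Several of the hunks above touch the dynamic-module machinery: make_module builds a module object at runtime and the integrate_* functions register it in sys.modules so a plugin's executors and macros become importable. A rough sketch of the mechanism (names invented for the example; not Airflow's exact implementation):

    import sys
    import types

    def make_sketch_module(name, objects):
        # stand-in for plugins_manager.make_module
        module = types.ModuleType(name)
        module.__dict__.update((obj.__name__, obj) for obj in objects)
        return module

    def sample_macro():
        return "hello"

    mod = make_sketch_module("sketch_macros", [sample_macro])
    sys.modules[mod.__name__] = mod  # the assignment the no-member disables guarded

    import sketch_macros  # resolved straight from sys.modules

    print(sketch_macros.sample_macro())  # -> hello
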
diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py
index cfc4b2c7..9b842c2 100644
--- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py
+++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py
@@ -52,7 +52,6 @@ YOUTUBE_VIDEO_FIELDS = getenv("YOUTUBE_VIDEO_FIELDS", "items(id,snippet(descript
 # [END howto_operator_google_api_to_s3_transfer_advanced_env_variables]
 
 
-# pylint: disable=unused-argument
 # [START howto_operator_google_api_to_s3_transfer_advanced_task_1_2]
 def _check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs):
     video_ids_response = task_instance.xcom_pull(task_ids=task_ids, key=xcom_key)
@@ -65,7 +64,7 @@ def _check_and_transform_video_ids(xcom_key, task_ids, task_instance, **kwargs):
 
 
 # [END howto_operator_google_api_to_s3_transfer_advanced_task_1_2]
-# pylint: enable=unused-argument
+
 
 s3_directory, s3_file = S3_DESTINATION_KEY.rsplit('/', 1)
 s3_file_name, _ = s3_file.rsplit('.', 1)
diff --git a/airflow/providers/amazon/aws/hooks/athena.py b/airflow/providers/amazon/aws/hooks/athena.py
index c7037fb..2b30fd8 100644
--- a/airflow/providers/amazon/aws/hooks/athena.py
+++ b/airflow/providers/amazon/aws/hooks/athena.py
@@ -100,12 +100,12 @@ class AWSAthenaHook(AwsBaseHook):
         state = None
         try:
             state = response['QueryExecution']['Status']['State']
-        except Exception as ex:  # pylint: disable=broad-except
+        except Exception as ex:
             self.log.error('Exception while getting query state %s', ex)
         finally:
             # The error is being absorbed here and is being handled by the caller.
             # The error is being absorbed to implement retries.
-            return state  # pylint: disable=lost-exception
+            return state
 
     def get_state_change_reason(self, query_execution_id: str) -> Optional[str]:
         """
@@ -119,12 +119,12 @@ class AWSAthenaHook(AwsBaseHook):
         reason = None
         try:
             reason = response['QueryExecution']['Status']['StateChangeReason']
-        except Exception as ex:  # pylint: disable=broad-except
+        except Exception as ex:
             self.log.error('Exception while getting query state change reason: %s', ex)
         finally:
             # The error is being absorbed here and is being handled by the caller.
             # The error is being absorbed to implement retries.
-            return reason  # pylint: disable=lost-exception
+            return reason
 
     def get_query_results(
         self, query_execution_id: str, next_token_id: Optional[str] = None, max_results: int = 1000
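
The `lost-exception` disables removed above guarded a deliberate pattern: a `return` inside a `finally` block replaces any exception still propagating, so the caller always receives a value (possibly None) instead of an error, and the hook's polling loop can retry on it. A standalone sketch of that behavior:

    def get_state(response: dict):
        state = None
        try:
            state = response['QueryExecution']['Status']['State']
        except Exception as ex:
            print(f'Exception while getting query state: {ex}')
        finally:
            # Returning here swallows anything still in flight, including
            # errors raised inside the except block itself.
            return state

    print(get_state({}))  # the KeyError is absorbed; prints None
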
diff --git a/airflow/providers/amazon/aws/hooks/aws_dynamodb.py b/airflow/providers/amazon/aws/hooks/aws_dynamodb.py
index 92153b9..dedb800 100644
--- a/airflow/providers/amazon/aws/hooks/aws_dynamodb.py
+++ b/airflow/providers/amazon/aws/hooks/aws_dynamodb.py
@@ -19,7 +19,6 @@
 
 import warnings
 
-# pylint: disable=unused-import
 from airflow.providers.amazon.aws.hooks.dynamodb import AwsDynamoDBHook  # noqa
 
 warnings.warn(
diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py
index c1c5b1d..5d6e22b 100644
--- a/airflow/providers/amazon/aws/hooks/base_aws.py
+++ b/airflow/providers/amazon/aws/hooks/base_aws.py
@@ -115,7 +115,7 @@ class _SessionFactory(LoggingMixin):
             botocore_session = self._assume_role_with_web_identity(
                 role_arn=role_arn,
                 assume_role_kwargs=assume_role_kwargs,
-                base_session=session._session,  # pylint: disable=protected-access
+                base_session=session._session,
             )
             return boto3.session.Session(
                 region_name=session.region_name,
@@ -286,7 +286,7 @@ class _SessionFactory(LoggingMixin):
             time_fetcher=lambda: datetime.datetime.now(tz=tzlocal()),
         )
         botocore_session = botocore.session.Session()
-        botocore_session._credentials = aws_creds  # pylint: disable=protected-access
+        botocore_session._credentials = aws_creds
         return botocore_session
 
     def _get_google_identity_token_loader(self):
diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py
index 6aa7156..ed392a9 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -38,13 +38,6 @@ from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.typing_compat import Protocol, runtime_checkable
 
-# Add exceptions to pylint for the boto3 protocol only; ideally the boto3 library
-# could provide
-# protocols for all their dynamically generated classes (try to migrate this to a PR on botocore).
-# Note that the use of invalid-name parameters should be restricted to the boto3 mappings only;
-# all the Airflow wrappers of boto3 clients should not adopt invalid-names to match boto3.
-# pylint: disable=invalid-name, unused-argument
-
 
 @runtime_checkable
 class AwsBatchProtocol(Protocol):
@@ -155,7 +148,6 @@ class AwsBatchProtocol(Protocol):
 
 # Note that the use of invalid-name parameters should be restricted to the boto3 mappings only;
 # all the Airflow wrappers of boto3 clients should not adopt invalid-names to match boto3.
-# pylint: enable=invalid-name, unused-argument
 
 
 class AwsBatchClientHook(AwsBaseHook):
@@ -211,9 +203,9 @@ class AwsBatchClientHook(AwsBaseHook):
         self.status_retries = status_retries or self.STATUS_RETRIES
 
     @property
-    def client(self) -> Union[AwsBatchProtocol, botocore.client.BaseClient]:  # noqa: D402
+    def client(self) -> Union[AwsBatchProtocol, botocore.client.BaseClient]:
         """
-        An AWS API client for batch services, like ``boto3.client('batch')``
+        An AWS API client for batch services.
 
         :return: a boto3 'batch' client for the ``.region_name``
         :rtype: Union[AwsBatchProtocol, botocore.client.BaseClient]
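
AwsBatchProtocol above relies on structural typing: a runtime-checkable Protocol describes the methods a dynamically generated boto3 client is expected to expose, since botocore provides no concrete class to subclass. A minimal sketch of the technique with stand-in names (Airflow routes the imports through airflow.typing_compat for older Pythons; plain `typing` works on 3.8+):

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class BatchProtocol(Protocol):
        def submit_job(self, **kwargs) -> dict:
            ...

    class FakeBatchClient:
        """Anything with a submit_job method satisfies the protocol."""

        def submit_job(self, **kwargs) -> dict:
            return {"jobId": "example"}

    # runtime_checkable enables isinstance(), which checks method
    # *presence* only, not signatures.
    assert isinstance(FakeBatchClient(), BatchProtocol)
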
diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/airflow/providers/amazon/aws/hooks/glue.py
index 8d1cba2..d24b968 100644
--- a/airflow/providers/amazon/aws/hooks/glue.py
+++ b/airflow/providers/amazon/aws/hooks/glue.py
@@ -64,7 +64,7 @@ class AwsGlueJobHook(AwsBaseHook):
         create_job_kwargs: Optional[dict] = None,
         *args,
         **kwargs,
-    ):  # pylint: disable=too-many-arguments
+    ):
         self.job_name = job_name
         self.desc = desc
         self.concurrent_run_limit = concurrent_run_limit
diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py b/airflow/providers/amazon/aws/hooks/glue_crawler.py
index 0ba3519..0b1ef5b 100644
--- a/airflow/providers/amazon/aws/hooks/glue_crawler.py
+++ b/airflow/providers/amazon/aws/hooks/glue_crawler.py
@@ -141,9 +141,7 @@ class AwsGlueCrawlerHook(AwsBaseHook):
                 self.log.info("crawler_config: %s", crawler)
                 crawler_status = crawler['LastCrawl']['Status']
                 if crawler_status in failed_status:
-                    raise AirflowException(
-                        f"Status: {crawler_status}"
-                    )  # pylint: disable=raising-format-tuple
+                    raise AirflowException(f"Status: {crawler_status}")
                 else:
                     metrics = self.glue_client.get_crawler_metrics(CrawlerNameList=[crawler_name])[
                         'CrawlerMetricsList'
diff --git a/airflow/providers/amazon/aws/hooks/redshift.py b/airflow/providers/amazon/aws/hooks/redshift.py
index 33e419c..d85834c 100644
--- a/airflow/providers/amazon/aws/hooks/redshift.py
+++ b/airflow/providers/amazon/aws/hooks/redshift.py
@@ -58,7 +58,7 @@ class RedshiftHook(AwsBaseHook):
         except self.get_conn().exceptions.ClusterNotFoundFault:
             return 'cluster_not_found'
 
-    def delete_cluster(  # pylint: disable=invalid-name
+    def delete_cluster(
         self,
         cluster_identifier: str,
         skip_final_cluster_snapshot: bool = True,
diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py
index 2e18754..11bd9f6 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# pylint: disable=invalid-name
+
 """Interact with AWS S3, using the boto3 library."""
 import fnmatch
 import gzip as gz
@@ -38,7 +38,7 @@ from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.utils.helpers import chunks
 
-T = TypeVar("T", bound=Callable)  # pylint: disable=invalid-name
+T = TypeVar("T", bound=Callable)
 
 
 def provide_bucket_name(func: T) -> T:
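
provide_bucket_name uses the `TypeVar("T", bound=Callable)` idiom so a decorator can promise type checkers that it returns the same callable type it was given, preserving the wrapped method's signature. A self-contained sketch with a made-up default in place of Airflow's connection lookup:

    from functools import wraps
    from typing import Callable, TypeVar, cast

    T = TypeVar("T", bound=Callable)

    def provide_default_bucket(func: T) -> T:
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Hypothetical fallback; the real hook reads the bucket
            # from the Airflow connection instead.
            kwargs.setdefault("bucket_name", "example-bucket")
            return func(*args, **kwargs)

        return cast(T, wrapper)

    @provide_default_bucket
    def list_keys(bucket_name: str = "") -> str:
        return bucket_name

    print(list_keys())  # -> example-bucket
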
diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py
index 756d888..dc4de1e 100644
--- a/airflow/providers/amazon/aws/hooks/sagemaker.py
+++ b/airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -131,7 +131,7 @@ def secondary_training_status_message(
     return '\n'.join(status_strs)
 
 
-class SageMakerHook(AwsBaseHook):  # pylint: disable=too-many-public-methods
+class SageMakerHook(AwsBaseHook):
     """
     Interact with Amazon SageMaker.
 
@@ -853,9 +853,9 @@ class SageMakerHook(AwsBaseHook):  # pylint: disable=too-many-public-methods
 
     def list_training_jobs(
         self, name_contains: Optional[str] = None, max_results: Optional[int] = None, **kwargs
-    ) -> List[Dict]:  # noqa: D402
+    ) -> List[Dict]:
         """
-        This method wraps boto3's list_training_jobs(). The training job name and max results are configurable
+        This method wraps boto3's ``list_training_jobs``. The training job name and max results are configurable
         via arguments. Other arguments are not, and should be provided via kwargs. Note boto3 expects these in
         CamelCase format, for example:
 
@@ -892,9 +892,9 @@ class SageMakerHook(AwsBaseHook):  # pylint: disable=too-many-public-methods
         )
         return results
 
-    def list_processing_jobs(self, **kwargs) -> List[Dict]:  # noqa: D402
+    def list_processing_jobs(self, **kwargs) -> List[Dict]:
         """
-        This method wraps boto3's list_processing_jobs(). All arguments should be provided via kwargs.
+        This method wraps boto3's ``list_processing_jobs``. All arguments should be provided via kwargs.
         Note boto3 expects these in CamelCase format, for example:
 
         .. code-block:: python
diff --git a/airflow/providers/amazon/aws/hooks/ses.py b/airflow/providers/amazon/aws/hooks/ses.py
index 5c7423f..0c7e39f 100644
--- a/airflow/providers/amazon/aws/hooks/ses.py
+++ b/airflow/providers/amazon/aws/hooks/ses.py
@@ -36,7 +36,7 @@ class SESHook(AwsBaseHook):
         kwargs['client_type'] = 'ses'
         super().__init__(*args, **kwargs)
 
-    def send_email(  # pylint: disable=too-many-arguments
+    def send_email(
         self,
         mail_from: str,
         to: Union[str, Iterable[str]],
diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
index 8584c1c..0b0a103 100644
--- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
+++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -62,7 +62,7 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
             from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 
             return AwsLogsHook(aws_conn_id=remote_conn_id, region_name=self.region_name)
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             self.log.error(
                 'Could not create an AwsLogsHook with connection id "%s". '
                 'Please make sure that apache-airflow[aws] is installed and '
@@ -122,7 +122,7 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
             )
 
             return '\n'.join(self._event_to_str(event) for event in events)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             msg = 'Could not read remote logs from log_group: {} log_stream: {}.'.format(
                 self.log_group, stream_name
             )
diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py
index f357fa3..7a6b3d8 100644
--- a/airflow/providers/amazon/aws/log/s3_task_handler.py
+++ b/airflow/providers/amazon/aws/log/s3_task_handler.py
@@ -50,7 +50,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
             from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 
             return S3Hook(remote_conn_id, transfer_config_args={"use_threads": False})
-        except Exception as e:  # pylint: disable=broad-except
+        except Exception as e:
             self.log.exception(
                 'Could not create an S3Hook with connection id "%s". '
                 'Please make sure that apache-airflow[aws] is installed and '
@@ -119,7 +119,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
 
         try:
             log_exists = self.s3_log_exists(remote_loc)
-        except Exception as error:  # pylint: disable=broad-except
+        except Exception as error:
             self.log.exception("Failed to verify remote log exists %s.", remote_loc)
             log = f'*** Failed to verify remote log exists {remote_loc}.\n{error}\n'
 
@@ -159,7 +159,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
         """
         try:
             return self.hook.read_key(remote_log_location)
-        except Exception as error:  # pylint: disable=broad-except
+        except Exception as error:
             msg = f'Could not read logs from {remote_log_location} with error: {error}'
             self.log.exception(msg)
             # return error if needed
@@ -184,7 +184,7 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
             if append and self.s3_log_exists(remote_log_location):
                 old_log = self.s3_read(remote_log_location)
                 log = '\n'.join([old_log, log]) if old_log else log
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self.log.exception('Could not verify previous log to append')
 
         try:
@@ -194,5 +194,5 @@ class S3TaskHandler(FileTaskHandler, LoggingMixin):
                 replace=True,
                 encrypt=conf.getboolean('logging', 'ENCRYPT_S3_LOGS'),
             )
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             self.log.exception('Could not write logs to %s', remote_log_location)
diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py
index 0b7d77d..633b6a3 100644
--- a/airflow/providers/amazon/aws/operators/athena.py
+++ b/airflow/providers/amazon/aws/operators/athena.py
@@ -59,7 +59,7 @@ class AWSAthenaOperator(BaseOperator):
     template_ext = ('.sql',)
     template_fields_renderers = {"query": "sql"}
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         query: str,
@@ -131,7 +131,7 @@ class AWSAthenaOperator(BaseOperator):
             http_status_code = None
             try:
                 http_status_code = response['ResponseMetadata']['HTTPStatusCode']
-            except Exception as ex:  # pylint: disable=broad-except
+            except Exception as ex:
                 self.log.error('Exception while cancelling query: %s', ex)
             finally:
                 if http_status_code is None or http_status_code != 200:
diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py
index 296b81e..46c5515 100644
--- a/airflow/providers/amazon/aws/operators/batch.py
+++ b/airflow/providers/amazon/aws/operators/batch.py
@@ -118,7 +118,7 @@ class AwsBatchOperator(BaseOperator):
         region_name: Optional[str] = None,
         tags: Optional[dict] = None,
         **kwargs,
-    ):  # pylint: disable=too-many-arguments
+    ):
 
         BaseOperator.__init__(self, **kwargs)
         self.job_id = job_id
@@ -150,7 +150,7 @@ class AwsBatchOperator(BaseOperator):
         response = self.hook.client.terminate_job(jobId=self.job_id, reason="Task killed by the user")
         self.log.info("AWS Batch job (%s) terminated: %s", self.job_id, response)
 
-    def submit_job(self, context: Dict):  # pylint: disable=unused-argument
+    def submit_job(self, context: Dict):
         """
         Submit an AWS Batch job
 
@@ -181,7 +181,7 @@ class AwsBatchOperator(BaseOperator):
             self.log.error("AWS Batch job (%s) failed submission", self.job_id)
             raise AirflowException(e)
 
-    def monitor_job(self, context: Dict):  # pylint: disable=unused-argument
+    def monitor_job(self, context: Dict):
         """
         Monitor an AWS Batch job
 
diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py
index 6c88eb1..750479b 100644
--- a/airflow/providers/amazon/aws/operators/datasync.py
+++ b/airflow/providers/amazon/aws/operators/datasync.py
@@ -26,7 +26,6 @@ from airflow.models import BaseOperator
 from airflow.providers.amazon.aws.hooks.datasync import AWSDataSyncHook
 
 
-# pylint: disable=too-many-instance-attributes, too-many-arguments
 class AWSDataSyncOperator(BaseOperator):
     r"""Find, Create, Update, Execute and Delete AWS DataSync Tasks.
 
diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py
index 01b1307..4c8e75b 100644
--- a/airflow/providers/amazon/aws/operators/ecs.py
+++ b/airflow/providers/amazon/aws/operators/ecs.py
@@ -54,7 +54,6 @@ class ECSProtocol(Protocol):
         - https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html
     """
 
-    # pylint: disable=C0103, line-too-long
     def run_task(self, **kwargs) -> Dict:
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.run_task"""  # noqa: E501
         ...
@@ -79,10 +78,8 @@ class ECSProtocol(Protocol):
         """https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ecs.html#ECS.Client.list_tasks"""  # noqa: E501
         ...
 
-    # pylint: enable=C0103, line-too-long
 
-
-class ECSOperator(BaseOperator):  # pylint: disable=too-many-instance-attributes
+class ECSOperator(BaseOperator):
     """
     Execute a task on AWS ECS (Elastic Container Service)
 
@@ -149,7 +146,7 @@ class ECSOperator(BaseOperator):  # pylint: disable=too-many-instance-attributes
         *,
         task_definition: str,
         cluster: str,
-        overrides: dict,  # pylint: disable=too-many-arguments
+        overrides: dict,
         aws_conn_id: Optional[str] = None,
         region_name: Optional[str] = None,
         launch_type: str = 'EC2',
diff --git a/airflow/providers/amazon/aws/operators/glue.py b/airflow/providers/amazon/aws/operators/glue.py
index 2544579..248c96b 100644
--- a/airflow/providers/amazon/aws/operators/glue.py
+++ b/airflow/providers/amazon/aws/operators/glue.py
@@ -75,7 +75,7 @@ class AwsGlueJobOperator(BaseOperator):
         iam_role_name: Optional[str] = None,
         create_job_kwargs: Optional[dict] = None,
         **kwargs,
-    ):  # pylint: disable=too-many-arguments
+    ):
         super().__init__(**kwargs)
         self.job_name = job_name
         self.job_desc = job_desc
diff --git a/airflow/providers/amazon/aws/operators/s3_file_transform.py b/airflow/providers/amazon/aws/operators/s3_file_transform.py
index 3ab861f..b6911a7 100644
--- a/airflow/providers/amazon/aws/operators/s3_file_transform.py
+++ b/airflow/providers/amazon/aws/operators/s3_file_transform.py
@@ -96,7 +96,7 @@ class S3FileTransformOperator(BaseOperator):
         replace: bool = False,
         **kwargs,
     ) -> None:
-        # pylint: disable=too-many-arguments
+
         super().__init__(**kwargs)
         self.source_s3_key = source_s3_key
         self.source_aws_conn_id = source_aws_conn_id
diff --git a/airflow/providers/amazon/aws/operators/sagemaker_base.py b/airflow/providers/amazon/aws/operators/sagemaker_base.py
index 8c414c1..4614f07 100644
--- a/airflow/providers/amazon/aws/operators/sagemaker_base.py
+++ b/airflow/providers/amazon/aws/operators/sagemaker_base.py
@@ -81,8 +81,8 @@ class SageMakerBaseOperator(BaseOperator):
         for field in self.integer_fields:
             self.parse_integer(self.config, field)
 
-    def expand_role(self):  # noqa: D402
-        """Placeholder for calling boto3's expand_role(), which expands an IAM role name into an ARN."""
+    def expand_role(self):
+        """Placeholder for calling boto3's `expand_role`, which expands an IAM role name into an ARN."""
 
     def preprocess_config(self):
         """Process the config into a usable form."""
diff --git a/airflow/providers/amazon/aws/sensors/sagemaker_base.py b/airflow/providers/amazon/aws/sensors/sagemaker_base.py
index fe2af29..8a0956e 100644
--- a/airflow/providers/amazon/aws/sensors/sagemaker_base.py
+++ b/airflow/providers/amazon/aws/sensors/sagemaker_base.py
@@ -76,7 +76,7 @@ class SageMakerBaseSensor(BaseSensorOperator):
         """Placeholder for checking status of a SageMaker task."""
         raise NotImplementedError('Please implement get_sagemaker_response() in subclass')
 
-    def get_failed_reason_from_response(self, response: dict) -> str:  # pylint: disable=unused-argument
+    def get_failed_reason_from_response(self, response: dict) -> str:
         """Placeholder for extracting the reason for failure from an AWS response."""
         return 'Unknown'
 
diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
index 7e0e410..da25e8b 100644
--- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
@@ -88,7 +88,7 @@ class DynamoDBToS3Operator(BaseOperator):
     :type s3_bucket_name: str
     :param file_size: Flush file to s3 if file size >= file_size
     :type file_size: int
-    :param dynamodb_scan_kwargs: kwargs pass to <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan>  # noqa: E501 pylint: disable=line-too-long
+    :param dynamodb_scan_kwargs: kwargs to pass to <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Table.scan>  # noqa: E501
     :type dynamodb_scan_kwargs: Optional[Dict[str, Any]]
     :param s3_key_prefix: Prefix of s3 object key
     :type s3_key_prefix: Optional[str]
@@ -147,6 +147,6 @@ class DynamoDBToS3Operator(BaseOperator):
             if getsize(temp_file.name) >= self.file_size:
                 _upload_file_to_s3(temp_file, self.s3_bucket_name, self.s3_key_prefix)
                 temp_file.close()
-                # pylint: disable=consider-using-with
+
                 temp_file = NamedTemporaryFile()
         return temp_file
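
The `consider-using-with` disable dropped above guarded a rotation pattern that a `with` block cannot express: the scan loop flushes the temp file to S3 once it reaches `file_size` and then swaps in a fresh NamedTemporaryFile, so the file object's lifetime spans loop iterations. A standalone sketch with the upload faked:

    from os.path import getsize
    from tempfile import NamedTemporaryFile

    FILE_SIZE = 64  # hypothetical flush threshold, in bytes

    def scan_chunks():
        # stand-in for the paginated DynamoDB scan
        yield from (b"x" * 32 for _ in range(5))

    temp_file = NamedTemporaryFile()
    try:
        for chunk in scan_chunks():
            temp_file.write(chunk)
            temp_file.flush()
            if getsize(temp_file.name) >= FILE_SIZE:
                print(f"uploading {getsize(temp_file.name)} bytes")  # stand-in for the S3 upload
                temp_file.close()
                temp_file = NamedTemporaryFile()  # rotate: this is what defeats `with`
    finally:
        temp_file.close()
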
diff --git a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py b/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
index 6784292..e5b53c6 100644
--- a/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/exasol_to_s3.py
@@ -60,7 +60,7 @@ class ExasolToS3Operator(BaseOperator):
     template_ext = ('.sql',)
     ui_color = '#ededed'
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         query_or_table: str,
diff --git a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
index 0684305..d4159ba 100644
--- a/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
@@ -95,7 +95,7 @@ class GCSToS3Operator(BaseOperator):
 
     def __init__(
         self,
-        *,  # pylint: disable=too-many-arguments
+        *,
         bucket: str,
         prefix: Optional[str] = None,
         delimiter: Optional[str] = None,
diff --git a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
index 5f027b5..52023ce 100644
--- a/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
+++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
@@ -59,7 +59,7 @@ class HiveToDynamoDBOperator(BaseOperator):
     template_ext = ('.sql',)
     ui_color = '#a0e08c'
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         sql: str,
diff --git a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
index b88036d..95b2bfc 100644
--- a/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
@@ -59,7 +59,6 @@ class MongoToS3Operator(BaseOperator):
     template_fields = ('s3_bucket', 's3_key', 'mongo_query', 'mongo_collection')
     ui_color = '#589636'
     template_fields_renderers = {"mongo_query": "py"}
-    # pylint: disable=too-many-instance-attributes
 
     def __init__(
         self,
diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
index 838d22d..bbd2c40 100644
--- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -75,7 +75,7 @@ class RedshiftToS3Operator(BaseOperator):
     template_ext = ()
     ui_color = '#ededed'
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         s3_bucket: str,
diff --git a/airflow/providers/apache/beam/hooks/beam.py b/airflow/providers/apache/beam/hooks/beam.py
index 1cf7a8b..4bd27eb 100644
--- a/airflow/providers/apache/beam/hooks/beam.py
+++ b/airflow/providers/apache/beam/hooks/beam.py
@@ -94,7 +94,7 @@ class BeamCommandRunner(LoggingMixin):
         self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd))
         self.process_line_callback = process_line_callback
         self.job_id: Optional[str] = None
-        # pylint: disable=consider-using-with
+
         self._proc = subprocess.Popen(
             cmd,
             shell=False,
@@ -185,7 +185,7 @@ class BeamHook(BaseHook):
         )
         cmd_runner.wait_for_done()
 
-    def start_python_pipeline(  # pylint: disable=too-many-arguments
+    def start_python_pipeline(
         self,
         variables: dict,
         py_file: str,
diff --git a/airflow/providers/apache/beam/operators/beam.py b/airflow/providers/apache/beam/operators/beam.py
index 7ff87bf..da57fea 100644
--- a/airflow/providers/apache/beam/operators/beam.py
+++ b/airflow/providers/apache/beam/operators/beam.py
@@ -234,9 +234,7 @@ class BeamRunPythonPipelineOperator(BaseOperator, BeamDataflowMixin):
         with ExitStack() as exit_stack:
             if self.py_file.lower().startswith("gs://"):
                 gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
-                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
-                    gcs_hook.provide_file(object_url=self.py_file)
-                )
+                tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.py_file))
                 self.py_file = tmp_gcs_file.name
 
             self.beam_hook.start_python_pipeline(
@@ -250,7 +248,7 @@ class BeamRunPythonPipelineOperator(BaseOperator, BeamDataflowMixin):
             )
 
             if is_dataflow:
-                self.dataflow_hook.wait_for_done(  # pylint: disable=no-value-for-parameter
+                self.dataflow_hook.wait_for_done(
                     job_name=dataflow_job_name,
                     location=self.dataflow_config.location,
                     job_id=self.dataflow_job_id,
@@ -268,7 +266,6 @@ class BeamRunPythonPipelineOperator(BaseOperator, BeamDataflowMixin):
             )
 
 
-# pylint: disable=too-many-instance-attributes
 class BeamRunJavaPipelineOperator(BaseOperator, BeamDataflowMixin):
     """
     Launching Apache Beam pipelines written in Java.
@@ -391,9 +388,7 @@ class BeamRunJavaPipelineOperator(BaseOperator, BeamDataflowMixin):
         with ExitStack() as exit_stack:
             if self.jar.lower().startswith("gs://"):
                 gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
-                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
-                    gcs_hook.provide_file(object_url=self.jar)
-                )
+                tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.jar))
                 self.jar = tmp_gcs_file.name
 
             if is_dataflow:
@@ -405,7 +400,7 @@ class BeamRunJavaPipelineOperator(BaseOperator, BeamDataflowMixin):
                         # This method is wrapped by @_fallback_to_project_id_from_variables decorator which
                         # fallback project_id value from variables and raise error if project_id is
                         # defined both in variables and as parameter (here is already defined in variables)
-                        self.dataflow_hook.is_job_dataflow_running(  # pylint: disable=no-value-for-parameter
+                        self.dataflow_hook.is_job_dataflow_running(
                             name=self.dataflow_config.job_name,
                             variables=pipeline_options,
                         )
@@ -416,7 +411,7 @@ class BeamRunJavaPipelineOperator(BaseOperator, BeamDataflowMixin):
                         # This method is wrapped by @_fallback_to_project_id_from_variables decorator which
                         # fallback project_id value from variables and raise error if project_id is
                         # defined both in variables and as parameter (here is already defined in variables)
-                        # pylint: disable=no-value-for-parameter
+
                         is_running = self.dataflow_hook.is_job_dataflow_running(
                             name=self.dataflow_config.job_name,
                             variables=pipeline_options,
diff --git a/airflow/providers/apache/cassandra/hooks/cassandra.py b/airflow/providers/apache/cassandra/hooks/cassandra.py
index 26265d3..461c857 100644
--- a/airflow/providers/apache/cassandra/hooks/cassandra.py
+++ b/airflow/providers/apache/cassandra/hooks/cassandra.py
@@ -210,5 +210,5 @@ class CassandraHook(BaseHook, LoggingMixin):
         try:
             result = self.get_conn().execute(query, keys)
             return result.one() is not None
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             return False
diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py
index 7c77ae5..dcd4363 100644
--- a/airflow/providers/apache/druid/transfers/hive_to_druid.py
+++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py
@@ -79,7 +79,7 @@ class HiveToDruidOperator(BaseOperator):
     template_fields = ('sql', 'intervals')
     template_ext = ('.sql',)
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         sql: str,
diff --git a/airflow/providers/apache/hdfs/hooks/hdfs.py b/airflow/providers/apache/hdfs/hooks/hdfs.py
index 5cd3523..034490d 100644
--- a/airflow/providers/apache/hdfs/hooks/hdfs.py
+++ b/airflow/providers/apache/hdfs/hooks/hdfs.py
@@ -23,7 +23,7 @@ from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 
 try:
-    from snakebite.client import AutoConfigClient, Client, HAClient, Namenode  # pylint: disable=syntax-error
+    from snakebite.client import AutoConfigClient, Client, HAClient, Namenode
 
     snakebite_loaded = True
 except ImportError:
diff --git a/airflow/providers/apache/hdfs/hooks/webhdfs.py b/airflow/providers/apache/hdfs/hooks/webhdfs.py
index 81750f8..fce445f 100644
--- a/airflow/providers/apache/hdfs/hooks/webhdfs.py
+++ b/airflow/providers/apache/hdfs/hooks/webhdfs.py
@@ -32,7 +32,7 @@ log = logging.getLogger(__name__)
 _kerberos_security_mode = conf.get("core", "security") == "kerberos"
 if _kerberos_security_mode:
     try:
-        from hdfs.ext.kerberos import KerberosClient  # pylint: disable=ungrouped-imports
+        from hdfs.ext.kerberos import KerberosClient
     except ImportError:
         log.error("Could not load the Kerberos extension for the WebHDFSHook.")
         raise
diff --git a/airflow/providers/apache/hdfs/sensors/hdfs.py b/airflow/providers/apache/hdfs/sensors/hdfs.py
index fb296eb..a378776 100644
--- a/airflow/providers/apache/hdfs/sensors/hdfs.py
+++ b/airflow/providers/apache/hdfs/sensors/hdfs.py
@@ -130,7 +130,7 @@ class HdfsSensor(BaseSensorOperator):
             result = self.filter_for_ignored_ext(result, self.ignored_ext, self.ignore_copying)
             result = self.filter_for_filesize(result, self.file_size)
             return bool(result)
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             e = sys.exc_info()
             self.log.debug("Caught an exception !: %s", str(e))
             return False
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index e2f3030..bcb31f9 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -777,7 +777,7 @@ class HiveMetastoreHook(BaseHook):
         try:
             self.get_table(table_name, db)
             return True
-        except Exception:  # pylint: disable=broad-except
+        except Exception:
             return False
 
     def drop_partitions(self, table_name, part_vals, delete_data=False, db='default'):
@@ -838,7 +838,7 @@ class HiveServer2Hook(DbApiHook):
         """Returns a Hive connection object."""
         username: Optional[str] = None
         password: Optional[str] = None
-        # pylint: disable=no-member
+
         db = self.get_connection(self.hiveserver2_conn_id)  # type: ignore
 
         auth_mechanism = db.extra_dejson.get('authMechanism', 'NONE')
@@ -874,8 +874,6 @@ class HiveServer2Hook(DbApiHook):
             database=schema or db.schema or 'default',
         )
 
-        # pylint: enable=no-member
-
     def _get_results(
         self,
         hql: Union[str, str, List[str]],
@@ -893,9 +891,9 @@ class HiveServer2Hook(DbApiHook):
             cur.arraysize = fetch_size or 1000
 
             # not all query services (e.g. impala AIRFLOW-4434) support the set command
-            # pylint: disable=no-member
+
             db = self.get_connection(self.hiveserver2_conn_id)  # type: ignore
-            # pylint: enable=no-member
+
             if db.extra_dejson.get('run_set_variable_statements', True):
                 env_context = get_context_from_env_var()
                 if hive_conf:
diff --git a/airflow/providers/apache/hive/operators/hive.py b/airflow/providers/apache/hive/operators/hive.py
index a0d8a66..7017ea1 100644
--- a/airflow/providers/apache/hive/operators/hive.py
+++ b/airflow/providers/apache/hive/operators/hive.py
@@ -77,7 +77,6 @@ class HiveOperator(BaseOperator):
     )
     ui_color = '#f0e4ec'
 
-    # pylint: disable=too-many-arguments
     def __init__(
         self,
         *,
diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
index 1c404ed..01c8b19 100644
--- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=no-member
+
 """This module contains operator to move data from MSSQL to Hive."""
 
 from collections import OrderedDict
@@ -100,7 +100,6 @@ class MsSqlToHiveOperator(BaseOperator):
         self.tblproperties = tblproperties
 
     @classmethod
-    # pylint: disable=c-extension-no-member,no-member
     def type_map(cls, mssql_type: int) -> str:
         """Maps MsSQL type to Hive type."""
         map_dict = {
diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
index 9c828aa..381ea2e 100644
--- a/airflow/providers/apache/hive/transfers/mysql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
@@ -81,7 +81,7 @@ class MySqlToHiveOperator(BaseOperator):
     template_ext = ('.sql',)
     ui_color = '#a0e08c'
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         sql: str,
diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py
index 4e833d7..04b0fb5 100644
--- a/airflow/providers/apache/hive/transfers/s3_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -32,7 +32,7 @@ from airflow.providers.apache.hive.hooks.hive import HiveCliHook
 from airflow.utils.compression import uncompress_file
 
 
-class S3ToHiveOperator(BaseOperator):  # pylint: disable=too-many-instance-attributes
+class S3ToHiveOperator(BaseOperator):
     """
     Moves data from S3 to Hive. The operator downloads a file from S3,
     stores the file locally before loading it into a Hive table.
@@ -103,7 +103,7 @@ class S3ToHiveOperator(BaseOperator):  # pylint: disable=too-many-instance-attri
     template_ext = ()
     ui_color = '#a0e08c'
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         *,
         s3_key: str,
diff --git a/airflow/providers/apache/kylin/operators/kylin_cube.py b/airflow/providers/apache/kylin/operators/kylin_cube.py
index f5730d8..76b5708 100644
--- a/airflow/providers/apache/kylin/operators/kylin_cube.py
+++ b/airflow/providers/apache/kylin/operators/kylin_cube.py
@@ -108,7 +108,6 @@ class KylinCubeOperator(BaseOperator):
     }
     jobs_end_status = {"FINISHED", "ERROR", "DISCARDED", "KILLED", "SUICIDAL", "STOPPED"}
 
-    # pylint: disable=too-many-arguments,inconsistent-return-statements
     def __init__(
         self,
         *,
diff --git a/airflow/providers/apache/livy/hooks/livy.py b/airflow/providers/apache/livy/hooks/livy.py
index 75d08af..2aab543 100644
--- a/airflow/providers/apache/livy/hooks/livy.py
+++ b/airflow/providers/apache/livy/hooks/livy.py
@@ -281,7 +281,6 @@ class LivyHook(HttpHook, LoggingMixin):
         Build the post batch request body.
         For more information about the format refer to
         .. seealso:: https://livy.apache.org/docs/latest/rest-api.html
-
         :param file: Path of the file containing the application to execute (required).
         :type file: str
         :param proxy_user: User to impersonate when running the job.
@@ -317,8 +316,6 @@ class LivyHook(HttpHook, LoggingMixin):
         :return: request body
         :rtype: dict
         """
-        # pylint: disable-msg=too-many-arguments
-
         body: Dict[str, Any] = {'file': file}
 
         if proxy_user:
diff --git a/airflow/providers/apache/livy/operators/livy.py b/airflow/providers/apache/livy/operators/livy.py
index 6b06754..9936cf7 100644
--- a/airflow/providers/apache/livy/operators/livy.py
+++ b/airflow/providers/apache/livy/operators/livy.py
@@ -95,7 +95,6 @@ class LivyOperator(BaseOperator):
         extra_options: Optional[Dict[str, Any]] = None,
         **kwargs: Any,
     ) -> None:
-        # pylint: disable-msg=too-many-arguments
 
         super().__init__(**kwargs)
 
diff --git a/airflow/providers/apache/pinot/hooks/pinot.py b/airflow/providers/apache/pinot/hooks/pinot.py
index b20d026..f48fd1b 100644
--- a/airflow/providers/apache/pinot/hooks/pinot.py
+++ b/airflow/providers/apache/pinot/hooks/pinot.py
@@ -107,7 +107,6 @@ class PinotAdminHook(BaseHook):
             cmd += ["-exec"]
         self.run_cli(cmd)
 
-    # pylint: disable=too-many-arguments
     def create_segment(
         self,
         generator_config_file: Optional[str] = None,
@@ -262,9 +261,8 @@ class PinotDbApiHook(DbApiHook):
 
     def get_conn(self) -> Any:
         """Establish a connection to pinot broker through pinot dbapi."""
-        # pylint: disable=no-member
         conn = self.get_connection(self.pinot_broker_conn_id)  # type: ignore
-        # pylint: enable=no-member
+
         pinot_broker_conn = connect(
             host=conn.host,
             port=conn.port,
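
[Editor's note] ``get_conn()`` above hands the Airflow connection fields to the pinotdb DB-API driver. A hedged standalone equivalent; host, port and path here are placeholders, so check your broker setup before reusing them:

    from pinotdb import connect

    # Connect straight to a Pinot broker's SQL endpoint.
    conn = connect(host="localhost", port=8099, path="/query/sql", scheme="http")
    cur = conn.cursor()
    cur.execute("SELECT 1")
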
diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/airflow/providers/apache/spark/hooks/spark_jdbc.py
index 9638f8c..d5dfcdc 100644
--- a/airflow/providers/apache/spark/hooks/spark_jdbc.py
+++ b/airflow/providers/apache/spark/hooks/spark_jdbc.py
@@ -23,7 +23,6 @@ from airflow.exceptions import AirflowException
 from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook
 
 
-# pylint: disable=too-many-instance-attributes
 class SparkJDBCHook(SparkSubmitHook):
     """
     This hook extends the SparkSubmitHook specifically for performing data
@@ -118,7 +117,6 @@ class SparkJDBCHook(SparkSubmitHook):
     conn_type = 'spark_jdbc'
     hook_name = 'Spark JDBC'
 
-    # pylint: disable=too-many-arguments,too-many-locals
     def __init__(
         self,
         spark_app_name: str = 'airflow-spark-jdbc',
diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py b/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
index 0a0a720..c354de6 100644
--- a/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
+++ b/airflow/providers/apache/spark/hooks/spark_jdbc_script.py
@@ -54,7 +54,6 @@ def set_common_options(
     return spark_source
 
 
-# pylint: disable=too-many-arguments
 def spark_write_to_jdbc(
     spark_session: SparkSession,
     url: str,
@@ -87,7 +86,6 @@ def spark_write_to_jdbc(
     writer.save(mode=save_mode)
 
 
-# pylint: disable=too-many-arguments
 def spark_read_from_jdbc(
     spark_session: SparkSession,
     url: str,
diff --git a/airflow/providers/apache/spark/hooks/spark_sql.py b/airflow/providers/apache/spark/hooks/spark_sql.py
index b690f2c..945de8f 100644
--- a/airflow/providers/apache/spark/hooks/spark_sql.py
+++ b/airflow/providers/apache/spark/hooks/spark_sql.py
@@ -61,7 +61,6 @@ class SparkSqlHook(BaseHook):
     conn_type = 'spark_sql'
     hook_name = 'Spark SQL'
 
-    # pylint: disable=too-many-arguments
     def __init__(
         self,
         sql: str,
@@ -158,7 +157,7 @@ class SparkSqlHook(BaseHook):
         :type kwargs: dict
         """
         spark_sql_cmd = self._prepare_command(cmd)
-        # pylint: disable=consider-using-with
+
         self._sp = subprocess.Popen(spark_sql_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
 
         for line in iter(self._sp.stdout):  # type: ignore
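
[Editor's note] The ``consider-using-with`` hint that used to be silenced here refers to the context-manager form of ``subprocess.Popen``. The hook keeps ``self._sp`` alive beyond any single block, so it cannot use that form, but for one-shot invocations the recommended shape looks roughly like this (the command is a placeholder):

    import subprocess

    # Context-managed Popen: the process is waited on and its pipes are
    # closed automatically when the block exits.
    with subprocess.Popen(
        ["echo", "hello"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
    ) as proc:
        for line in proc.stdout:
            print(line.decode().rstrip())
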
diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py
index d27fa64..821604b 100644
--- a/airflow/providers/apache/spark/hooks/spark_submit.py
+++ b/airflow/providers/apache/spark/hooks/spark_submit.py
@@ -34,7 +34,6 @@ except (ImportError, NameError):
     pass
 
 
-# pylint: disable=too-many-instance-attributes
 class SparkSubmitHook(BaseHook, LoggingMixin):
     """
     This hook is a wrapper around the spark-submit binary to kick off a spark-submit job.
@@ -118,7 +117,6 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
             "relabeling": {},
         }
 
-    # pylint: disable=too-many-arguments,too-many-locals,too-many-branches
     def __init__(
         self,
         conf: Optional[Dict[str, Any]] = None,
@@ -427,7 +425,6 @@ class SparkSubmitHook(BaseHook, LoggingMixin):
             env.update(self._env)
             kwargs["env"] = env
 
-        # pylint: disable=consider-using-with
         self._submit_sp = subprocess.Popen(
             spark_submit_cmd,
             stdout=subprocess.PIPE,
diff --git a/airflow/providers/apache/spark/operators/spark_jdbc.py b/airflow/providers/apache/spark/operators/spark_jdbc.py
index b15ffca..280d428 100644
--- a/airflow/providers/apache/spark/operators/spark_jdbc.py
+++ b/airflow/providers/apache/spark/operators/spark_jdbc.py
@@ -22,7 +22,6 @@ from airflow.providers.apache.spark.hooks.spark_jdbc import SparkJDBCHook
 from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator
 
 
-# pylint: disable=too-many-instance-attributes
 class SparkJDBCOperator(SparkSubmitOperator):
     """
     This operator extends the SparkSubmitOperator specifically for performing data
@@ -118,7 +117,6 @@ class SparkJDBCOperator(SparkSubmitOperator):
                                       types.
     """
 
-    # pylint: disable=too-many-arguments,too-many-locals
     def __init__(
         self,
         *,
diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py
index 6c52fa2..536d830 100644
--- a/airflow/providers/apache/spark/operators/spark_sql.py
+++ b/airflow/providers/apache/spark/operators/spark_sql.py
@@ -61,7 +61,6 @@ class SparkSqlOperator(BaseOperator):
     template_fields = ["_sql"]
     template_ext = [".sql", ".hql"]
 
-    # pylint: disable=too-many-arguments
     def __init__(
         self,
         *,
diff --git a/airflow/providers/apache/spark/operators/spark_submit.py b/airflow/providers/apache/spark/operators/spark_submit.py
index 090f104..f1e933c 100644
--- a/airflow/providers/apache/spark/operators/spark_submit.py
+++ b/airflow/providers/apache/spark/operators/spark_submit.py
@@ -23,7 +23,6 @@ from airflow.providers.apache.spark.hooks.spark_submit import SparkSubmitHook
 from airflow.settings import WEB_COLORS
 
 
-# pylint: disable=too-many-instance-attributes
 class SparkSubmitOperator(BaseOperator):
     """
     This operator is a wrapper around the spark-submit binary to kick off a spark-submit job.
@@ -113,7 +112,6 @@ class SparkSubmitOperator(BaseOperator):
     )
     ui_color = WEB_COLORS['LIGHTORANGE']
 
-    # pylint: disable=too-many-arguments,too-many-locals
     def __init__(
         self,
         *,
diff --git a/airflow/providers/apache/sqoop/hooks/sqoop.py b/airflow/providers/apache/sqoop/hooks/sqoop.py
index ed5378a..21684a5 100644
--- a/airflow/providers/apache/sqoop/hooks/sqoop.py
+++ b/airflow/providers/apache/sqoop/hooks/sqoop.py
@@ -206,7 +206,6 @@ class SqoopHook(BaseHook):
 
         return cmd
 
-    # pylint: disable=too-many-arguments
     def import_table(
         self,
         table: str,
@@ -280,7 +279,6 @@ class SqoopHook(BaseHook):
 
         self.popen(cmd)
 
-    # pylint: disable=too-many-arguments
     def _export_cmd(
         self,
         table: str,
@@ -348,7 +346,6 @@ class SqoopHook(BaseHook):
 
         return cmd
 
-    # pylint: disable=too-many-arguments
     def export_table(
         self,
         table: str,
diff --git a/airflow/providers/apache/sqoop/operators/sqoop.py b/airflow/providers/apache/sqoop/operators/sqoop.py
index a790e49..242ed25 100644
--- a/airflow/providers/apache/sqoop/operators/sqoop.py
+++ b/airflow/providers/apache/sqoop/operators/sqoop.py
@@ -26,7 +26,6 @@ from airflow.models import BaseOperator
 from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook
 
 
-# pylint: disable=too-many-instance-attributes
 class SqoopOperator(BaseOperator):
     """
     Execute a Sqoop job.
@@ -108,7 +107,6 @@ class SqoopOperator(BaseOperator):
     )
     ui_color = '#7D8CA4'
 
-    # pylint: disable=too-many-arguments,too-many-locals
     def __init__(
         self,
         *,
diff --git a/airflow/providers/asana/hooks/asana.py b/airflow/providers/asana/hooks/asana.py
index ca8c4c3..b1623f8 100644
--- a/airflow/providers/asana/hooks/asana.py
+++ b/airflow/providers/asana/hooks/asana.py
@@ -91,7 +91,7 @@ class AsanaHook(BaseHook):
         """
         merged_params = self._merge_create_task_parameters(task_name, params)
         self._validate_create_task_parameters(merged_params)
-        response = self.client.tasks.create(params=merged_params)  # pylint: disable=no-member
+        response = self.client.tasks.create(params=merged_params)
         return response
 
     def _merge_create_task_parameters(self, task_name: str, task_params: dict) -> dict:
@@ -134,7 +134,7 @@ class AsanaHook(BaseHook):
         :return: A dict containing the response from Asana
         """
         try:
-            response = self.client.tasks.delete_task(task_id)  # pylint: disable=no-member
+            response = self.client.tasks.delete_task(task_id)
             return response
         except NotFoundError:
             self.log.info("Asana task %s not found for deletion.", task_id)
@@ -150,7 +150,7 @@ class AsanaHook(BaseHook):
         """
         merged_params = self._merge_find_task_parameters(params)
         self._validate_find_task_parameters(merged_params)
-        response = self.client.tasks.find_all(params=merged_params)  # pylint: disable=no-member
+        response = self.client.tasks.find_all(params=merged_params)
         return list(response)
 
     def _merge_find_task_parameters(self, search_parameters: dict) -> dict:
@@ -198,7 +198,7 @@ class AsanaHook(BaseHook):
             https://developers.asana.com/docs/update-a-task
         :return: A dict containing the updated task's attributes
         """
-        response = self.client.tasks.update(task_id, params)  # pylint: disable=no-member
+        response = self.client.tasks.update(task_id, params)
         return response
 
     def create_project(self, params: dict) -> dict:
@@ -212,7 +212,7 @@ class AsanaHook(BaseHook):
         """
         merged_params = self._merge_project_parameters(params)
         self._validate_create_project_parameters(merged_params)
-        response = self.client.projects.create(merged_params)  # pylint: disable=no-member
+        response = self.client.projects.create(merged_params)
         return response
 
     @staticmethod
@@ -251,7 +251,7 @@ class AsanaHook(BaseHook):
         :return: A list of dicts containing attributes of matching Asana projects
         """
         merged_params = self._merge_project_parameters(params)
-        response = self.client.projects.find_all(merged_params)  # pylint: disable=no-member
+        response = self.client.projects.find_all(merged_params)
         return list(response)
 
     def update_project(self, project_id: str, params: dict) -> dict:
@@ -264,7 +264,7 @@ class AsanaHook(BaseHook):
             for a list of possible parameters
         :return: A dict containing the updated project's attributes
         """
-        response = self.client.projects.update(project_id, params)  # pylint: disable=no-member
+        response = self.client.projects.update(project_id, params)
         return response
 
     def delete_project(self, project_id: str) -> dict:
@@ -275,7 +275,7 @@ class AsanaHook(BaseHook):
         :return: A dict containing the response from Asana
         """
         try:
-            response = self.client.projects.delete(project_id)  # pylint: disable=no-member
+            response = self.client.projects.delete(project_id)
             return response
         except NotFoundError:
             self.log.info("Asana project %s not found for deletion.", project_id)
diff --git a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py b/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
index dae4c8a..7c28105 100644
--- a/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
+++ b/airflow/providers/cncf/kubernetes/backcompat/backwards_compat_converters.py
@@ -42,7 +42,7 @@ def _convert_from_dict(obj, new_class):
         return obj
     elif isinstance(obj, dict):
         api_client = ApiClient()
-        return api_client._ApiClient__deserialize_model(obj, new_class)  # pylint: disable=W0212
+        return api_client._ApiClient__deserialize_model(obj, new_class)
     else:
         raise AirflowException(f"Expected dict or {new_class}, got {type(obj)}")
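
[Editor's note] ``_ApiClient__deserialize_model`` is a name-mangled private method of the kubernetes client, which is exactly why pylint flagged it (W0212, protected-access). A sketch of the conversion it performs, with a made-up payload:

    from kubernetes.client import ApiClient, models as k8s

    api_client = ApiClient()
    # Turns a plain dict into the typed model object, here V1ResourceRequirements.
    resources = api_client._ApiClient__deserialize_model(
        {"limits": {"cpu": "1", "memory": "512Mi"}}, k8s.V1ResourceRequirements
    )
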
 
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
index 32cc9c9..af1828d 100644
--- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -53,7 +53,7 @@ if TYPE_CHECKING:
     import jinja2
 
 
-class KubernetesPodOperator(BaseOperator):  # pylint: disable=too-many-instance-attributes
+class KubernetesPodOperator(BaseOperator):
     """
     Execute a task in a Kubernetes Pod
 
@@ -173,7 +173,7 @@ class KubernetesPodOperator(BaseOperator):  # pylint: disable=too-many-instance-
     )
 
     # fmt: off
-    def __init__(  # pylint: disable=too-many-arguments,too-many-locals
+    def __init__(
         # fmt: on
         self,
         *,
diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py
index 350e5b4..f83191e 100644
--- a/airflow/providers/databricks/hooks/databricks.py
+++ b/airflow/providers/databricks/hooks/databricks.py
@@ -86,7 +86,7 @@ class RunState:
         return str(self.__dict__)
 
 
-class DatabricksHook(BaseHook):  # noqa
+class DatabricksHook(BaseHook):
     """
     Interact with Databricks.
 
diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py
index ed37f75..8956b43 100644
--- a/airflow/providers/databricks/operators/databricks.py
+++ b/airflow/providers/databricks/operators/databricks.py
@@ -247,7 +247,6 @@ class DatabricksSubmitRunOperator(BaseOperator):
     ui_color = '#1CB1C2'
     ui_fgcolor = '#fff'
 
-    # pylint: disable=too-many-arguments
     def __init__(
         self,
         *,
@@ -457,7 +456,6 @@ class DatabricksRunNowOperator(BaseOperator):
     ui_color = '#1CB1C2'
     ui_fgcolor = '#fff'
 
-    # pylint: disable=too-many-arguments
     def __init__(
         self,
         *,
diff --git a/airflow/providers/datadog/hooks/datadog.py b/airflow/providers/datadog/hooks/datadog.py
index 2b993b0..538a775 100644
--- a/airflow/providers/datadog/hooks/datadog.py
+++ b/airflow/providers/datadog/hooks/datadog.py
@@ -110,7 +110,6 @@ class DatadogHook(BaseHook, LoggingMixin):
         self.validate_response(response)
         return response
 
-    # pylint: disable=too-many-arguments
     def post_event(
         self,
         title: str,
diff --git a/airflow/providers/docker/example_dags/example_docker_copy_data.py b/airflow/providers/docker/example_dags/example_docker_copy_data.py
index 7adb5ca..9dc8478 100644
--- a/airflow/providers/docker/example_dags/example_docker_copy_data.py
+++ b/airflow/providers/docker/example_dags/example_docker_copy_data.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-# pylint: disable=missing-function-docstring
+
 """
 This sample "listen to directory". move the new file and print it,
 using docker-containers.
diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py
index 29a6f79..7b5d8c6 100644
--- a/airflow/providers/docker/operators/docker.py
+++ b/airflow/providers/docker/operators/docker.py
@@ -28,7 +28,6 @@ from airflow.models import BaseOperator
 from airflow.providers.docker.hooks.docker import DockerHook
 
 
-# pylint: disable=too-many-instance-attributes
 class DockerOperator(BaseOperator):
     """
     Execute a command inside a docker container.
@@ -135,7 +134,6 @@ class DockerOperator(BaseOperator):
         '.bash',
     )
 
-    # pylint: disable=too-many-arguments,too-many-locals
     def __init__(
         self,
         *,
@@ -296,7 +294,7 @@ class DockerOperator(BaseOperator):
             raise Exception("The 'cli' should be initialized before!")
 
         # Pull the docker image if `force_pull` is set or image does not exist locally
-        # pylint: disable=too-many-nested-blocks
+
         if self.force_pull or not self.cli.images(name=self.image):
             self.log.info('Pulling docker image %s', self.image)
             latest_status = {}
@@ -354,7 +352,7 @@ class DockerOperator(BaseOperator):
                 ca_cert=self.tls_ca_cert,
                 client_cert=(self.tls_client_cert, self.tls_client_key),
                 verify=True,
-                ssl_version=self.tls_ssl_version,  # noqa
+                ssl_version=self.tls_ssl_version,
                 assert_hostname=self.tls_hostname,
             )
             self.docker_url = self.docker_url.replace('tcp://', 'https://')
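
[Editor's note] For context on the ``ssl_version`` line above: the operator builds a docker-py ``TLSConfig`` and swaps the URL scheme to https. A standalone sketch with placeholder certificate paths and host:

    from docker import APIClient
    from docker.tls import TLSConfig

    tls_config = TLSConfig(
        ca_cert="/certs/ca.pem",                         # placeholder paths
        client_cert=("/certs/cert.pem", "/certs/key.pem"),
        verify=True,
    )
    cli = APIClient(base_url="https://docker-host:2376", tls=tls_config)
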
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py
index ae08ecd..e20038c 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -63,7 +63,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
     MAX_LINE_PER_PAGE = 1000
     LOG_NAME = 'Elasticsearch'
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(
         self,
         base_log_folder: str,
         filename_template: str,
@@ -210,9 +210,9 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
         # if we change the formatter style from '%' to '{' or '$', this will still work
         if self.json_format:
             try:
-                # pylint: disable=protected-access
+
                 return self.formatter._style.format(_ESJsonLogFmt(self.json_fields, **log_line.to_dict()))
-            except Exception:  # noqa pylint: disable=broad-except
+            except Exception:
                 pass
 
         # Just a safeguard to preserve backwards compatibility
@@ -243,7 +243,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
                     )
                 else:
                     metadata['max_offset'] = 0
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self.log.exception('Could not get current log size with log_id: %s', log_id)
 
         logs = []
@@ -251,7 +251,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
             try:
 
                 logs = search[self.MAX_LINE_PER_PAGE * self.PAGE : self.MAX_LINE_PER_PAGE].execute()
-            except Exception:  # pylint: disable=broad-except
+            except Exception:
                 self.log.exception('Could not read log with log_id: %s', log_id)
 
         return logs
@@ -266,7 +266,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
 
         if self.json_format:
             self.formatter = JSONFormatter(
-                fmt=self.formatter._fmt,  # pylint: disable=protected-access
+                fmt=self.formatter._fmt,
                 json_fields=self.json_fields,
                 extras={
                     'dag_id': str(ti.dag_id),
@@ -311,7 +311,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
         # Reopen the file stream, because FileHandler.close() would be called
         # first in logging.shutdown() and the stream in it would be set to None.
         if self.handler.stream is None or self.handler.stream.closed:
-            self.handler.stream = self.handler._open()  # pylint: disable=protected-access
+            self.handler.stream = self.handler._open()
 
         # Mark the end of file using end of log mark,
         # so we know where to stop while auto-tailing.
diff --git a/airflow/providers/ftp/hooks/ftp.py b/airflow/providers/ftp/hooks/ftp.py
index a03e461..2db2a33 100644
--- a/airflow/providers/ftp/hooks/ftp.py
+++ b/airflow/providers/ftp/hooks/ftp.py
@@ -174,7 +174,7 @@ class FTPHook(BaseHook):
         # file-like buffer
         if not callback:
             if is_path:
-                # pylint: disable=consider-using-with
+
                 output_handle = open(local_full_path_or_buffer, 'wb')
             else:
                 output_handle = local_full_path_or_buffer
@@ -210,7 +210,7 @@ class FTPHook(BaseHook):
         is_path = isinstance(local_full_path_or_buffer, str)
 
         if is_path:
-            # pylint: disable=consider-using-with
+
             input_handle = open(local_full_path_or_buffer, 'rb')
         else:
             input_handle = local_full_path_or_buffer
diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py
index 4f35552..39f479b 100644
--- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py
+++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py
@@ -176,14 +176,14 @@ with models.DAG(
     )
 
     (
-        create_dataset_task  # noqa
-        >> import_dataset_task  # noqa
-        >> list_tables_spec_task  # noqa
-        >> list_columns_spec_task  # noqa
-        >> update_dataset_task  # noqa
-        >> create_model_task  # noqa
-        >> delete_model_task  # noqa
-        >> delete_datasets_task  # noqa
+        create_dataset_task
+        >> import_dataset_task
+        >> list_tables_spec_task
+        >> list_columns_spec_task
+        >> update_dataset_task
+        >> create_model_task
+        >> delete_model_task
+        >> delete_datasets_task
     )
 
 
@@ -243,12 +243,12 @@ with models.DAG(
     # [END howto_operator_delete_dataset]
 
     (
-        create_dataset_task  # noqa
-        >> import_dataset_task  # noqa
-        >> list_tables_spec_task  # noqa
-        >> list_columns_spec_task  # noqa
-        >> list_datasets_task  # noqa
-        >> delete_datasets_task  # noqa
+        create_dataset_task
+        >> import_dataset_task
+        >> list_tables_spec_task
+        >> list_columns_spec_task
+        >> list_datasets_task
+        >> delete_datasets_task
     )
 
 with models.DAG(
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
index b0e2b86..965f3f9 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
@@ -105,8 +105,8 @@ with models.DAG(
     # [END howto_bigquery_delete_data_transfer]
 
     (
-        gcp_bigquery_create_transfer  # noqa
-        >> gcp_bigquery_start_transfer  # noqa
-        >> gcp_run_sensor  # noqa
-        >> gcp_bigquery_delete_transfer  # noqa
+        gcp_bigquery_create_transfer
+        >> gcp_bigquery_start_transfer
+        >> gcp_run_sensor
+        >> gcp_bigquery_delete_transfer
     )
diff --git a/airflow/providers/google/cloud/example_dags/example_bigtable.py b/airflow/providers/google/cloud/example_dags/example_bigtable.py
index fc62cdf..2f3cfd0 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigtable.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigtable.py
@@ -30,7 +30,7 @@ This DAG relies on the following environment variables:
 * CBT_INSTANCE_ID - desired ID of a Cloud Bigtable instance
 * CBT_INSTANCE_DISPLAY_NAME - desired human-readable display name of the Instance
 * CBT_INSTANCE_TYPE - type of the Instance, e.g. 1 for DEVELOPMENT
-    See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance # noqa E501  # pylint: disable=line-too-long
+    See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance # noqa E501
 * CBT_INSTANCE_LABELS - labels to add for the Instance
 * CBT_CLUSTER_ID - desired ID of the main Cluster created for the Instance
 * CBT_CLUSTER_ZONE - zone in which the main Cluster will be created, e.g. europe-west1-b
@@ -38,7 +38,7 @@ This DAG relies on the following environment variables:
 * CBT_CLUSTER_NODES - initial number of nodes of the Cluster
 * CBT_CLUSTER_NODES_UPDATED - number of nodes for BigtableClusterUpdateOperator
 * CBT_CLUSTER_STORAGE_TYPE - storage for the Cluster, e.g. 1 for SSD
-    See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster # noqa E501  # pylint: disable=line-too-long
+    See https://googleapis.github.io/google-cloud-python/latest/bigtable/instance.html#google.cloud.bigtable.instance.Instance.cluster # noqa E501
 * CBT_TABLE_ID - desired ID of the Table
 * CBT_POKE_INTERVAL - number of seconds between every attempt of Sensor check
 
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_build.py b/airflow/providers/google/cloud/example_dags/example_cloud_build.py
index cfaf364..1591fe0 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_build.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_build.py
@@ -110,6 +110,6 @@ with models.DAG(
         params={'name': 'Airflow'},
     )
     # [END howto_operator_gcp_create_build_from_yaml_body]
-    create_build_from_storage >> create_build_from_storage_result  # pylint: disable=pointless-statement
+    create_build_from_storage >> create_build_from_storage_result
 
-    create_build_from_repo >> create_build_from_repo_result  # pylint: disable=pointless-statement
+    create_build_from_repo >> create_build_from_repo_result
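
[Editor's note] The ``pointless-statement`` suppressions removed in the example DAGs existed because a bare ``a >> b`` expression has no useful value; pylint could not see that ``BaseOperator.__rshift__`` wires the dependency as a side effect. A self-contained illustration (DAG id and task ids are arbitrary):

    from airflow import DAG
    from airflow.operators.dummy import DummyOperator
    from airflow.utils.dates import days_ago

    with DAG("rshift_example", start_date=days_ago(1)) as dag:
        a = DummyOperator(task_id="a")
        b = DummyOperator(task_id="b")
        # The bare expression below sets a -> b as a dependency; its unused
        # return value is what pylint used to report as "pointless".
        a >> b
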
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py
index 286ecf3..d1d9fd4 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py
@@ -309,26 +309,26 @@ with models.DAG(
     )
 
     (
-        sql_instance_create_task  # noqa
-        >> sql_instance_create_2_task  # noqa
-        >> sql_instance_read_replica_create  # noqa
-        >> sql_instance_patch_task  # noqa
-        >> sql_instance_patch_task2  # noqa
-        >> sql_db_create_task  # noqa
-        >> sql_db_create_task2  # noqa
-        >> sql_db_patch_task  # noqa
-        >> sql_db_patch_task2  # noqa
-        >> sql_gcp_add_bucket_permission_task  # noqa
-        >> sql_export_task  # noqa
-        >> sql_export_task2  # noqa
-        >> sql_gcp_add_object_permission_task  # noqa
-        >> sql_gcp_add_bucket_permission_2_task  # noqa
-        >> sql_import_task  # noqa
-        >> sql_import_task2  # noqa
-        >> sql_db_delete_task  # noqa
-        >> sql_db_delete_task2  # noqa
-        >> sql_instance_failover_replica_delete_task  # noqa
-        >> sql_instance_read_replica_delete_task  # noqa
-        >> sql_instance_delete_task  # noqa
-        >> sql_instance_delete_2_task  # noqa
+        sql_instance_create_task
+        >> sql_instance_create_2_task
+        >> sql_instance_read_replica_create
+        >> sql_instance_patch_task
+        >> sql_instance_patch_task2
+        >> sql_db_create_task
+        >> sql_db_create_task2
+        >> sql_db_patch_task
+        >> sql_db_patch_task2
+        >> sql_gcp_add_bucket_permission_task
+        >> sql_export_task
+        >> sql_export_task2
+        >> sql_gcp_add_object_permission_task
+        >> sql_gcp_add_bucket_permission_2_task
+        >> sql_import_task
+        >> sql_import_task2
+        >> sql_db_delete_task
+        >> sql_db_delete_task2
+        >> sql_instance_failover_replica_delete_task
+        >> sql_instance_read_replica_delete_task
+        >> sql_instance_delete_task
+        >> sql_instance_delete_2_task
     )
diff --git a/airflow/providers/google/cloud/example_dags/example_tasks.py b/airflow/providers/google/cloud/example_dags/example_tasks.py
index 82335de..dc2759f 100644
--- a/airflow/providers/google/cloud/example_dags/example_tasks.py
+++ b/airflow/providers/google/cloud/example_dags/example_tasks.py
@@ -49,7 +49,7 @@ from airflow.providers.google.cloud.operators.tasks import (
 from airflow.utils.dates import days_ago
 
 timestamp = timestamp_pb2.Timestamp()
-timestamp.FromDatetime(datetime.now() + timedelta(hours=12))  # pylint: disable=no-member
+timestamp.FromDatetime(datetime.now() + timedelta(hours=12))
 
 LOCATION = "europe-west1"
 QUEUE_ID = os.environ.get('GCP_TASKS_QUEUE_ID', "cloud-tasks-queue")
diff --git a/airflow/providers/google/cloud/example_dags/example_vision.py b/airflow/providers/google/cloud/example_dags/example_vision.py
index 516a70d..9e38a31 100644
--- a/airflow/providers/google/cloud/example_dags/example_vision.py
+++ b/airflow/providers/google/cloud/example_dags/example_vision.py
@@ -57,23 +57,23 @@ from airflow.providers.google.cloud.operators.vision import (
 from airflow.utils.dates import days_ago
 
 # [START howto_operator_vision_retry_import]
-from google.api_core.retry import Retry  # isort:skip pylint: disable=wrong-import-order
+from google.api_core.retry import Retry  # isort:skip
 
 # [END howto_operator_vision_retry_import]
 # [START howto_operator_vision_product_set_import]
-from google.cloud.vision_v1.types import ProductSet  # isort:skip pylint: disable=wrong-import-order
+from google.cloud.vision_v1.types import ProductSet  # isort:skip
 
 # [END howto_operator_vision_product_set_import]
 # [START howto_operator_vision_product_import]
-from google.cloud.vision_v1.types import Product  # isort:skip pylint: disable=wrong-import-order
+from google.cloud.vision_v1.types import Product  # isort:skip
 
 # [END howto_operator_vision_product_import]
 # [START howto_operator_vision_reference_image_import]
-from google.cloud.vision_v1.types import ReferenceImage  # isort:skip pylint: disable=wrong-import-order
+from google.cloud.vision_v1.types import ReferenceImage  # isort:skip
 
 # [END howto_operator_vision_reference_image_import]
 # [START howto_operator_vision_enums_import]
-from google.cloud.vision import enums  # isort:skip pylint: disable=wrong-import-order
+from google.cloud.vision import enums  # isort:skip
 
 # [END howto_operator_vision_enums_import]
 
diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py
index a21fa0a..63a002a 100644
--- a/airflow/providers/google/cloud/hooks/automl.py
+++ b/airflow/providers/google/cloud/hooks/automl.py
@@ -332,7 +332,7 @@ class CloudAutoMLHook(GoogleBaseHook):
         return result
 
     @GoogleBaseHook.fallback_to_default_project_id
-    def list_column_specs(  # pylint: disable=too-many-arguments
+    def list_column_specs(
         self,
         dataset_id: str,
         table_spec_id: str,
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index e36baf5..20afbc0 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -63,7 +63,6 @@ log = logging.getLogger(__name__)
 BigQueryJob = Union[CopyJob, QueryJob, LoadJob, ExtractJob]
 
 
-# pylint: disable=too-many-public-methods
 class BigQueryHook(GoogleBaseHook, DbApiHook):
     """
     Interact with BigQuery. This hook uses the Google Cloud connection.
@@ -285,7 +284,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
             return False
 
     @GoogleBaseHook.fallback_to_default_project_id
-    def create_empty_table(  # pylint: disable=too-many-arguments
+    def create_empty_table(
         self,
         project_id: Optional[str] = None,
         dataset_id: Optional[str] = None,
@@ -533,7 +532,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         )
 
     @GoogleBaseHook.fallback_to_default_project_id
-    def create_external_table(  # pylint: disable=too-many-locals,too-many-arguments
+    def create_external_table(
         self,
         external_project_dataset_table: str,
         schema_fields: List,
@@ -753,7 +752,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         return table_object.to_api_repr()
 
     @GoogleBaseHook.fallback_to_default_project_id
-    def patch_table(  # pylint: disable=too-many-arguments
+    def patch_table(
         self,
         dataset_id: str,
         table_id: str,
@@ -1012,7 +1011,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
 
         self.log.info('Start patching dataset: %s:%s', dataset_project_id, dataset_id)
         dataset = (
-            service.datasets()  # pylint: disable=no-member
+            service.datasets()
             .patch(
                 datasetId=dataset_id,
                 projectId=dataset_project_id,
@@ -1619,14 +1618,14 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
             "configuration": configuration,
             "jobReference": {"jobId": job_id, "projectId": project_id, "location": location},
         }
-        # pylint: disable=protected-access
+
         supported_jobs = {
             LoadJob._JOB_TYPE: LoadJob,
             CopyJob._JOB_TYPE: CopyJob,
             ExtractJob._JOB_TYPE: ExtractJob,
             QueryJob._JOB_TYPE: QueryJob,
         }
-        # pylint: enable=protected-access
+
         job = None
         for job_type, job_object in supported_jobs.items():
             if job_type in configuration:
@@ -1659,7 +1658,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         self.running_job_id = job.job_id
         return job.job_id
 
-    def run_load(  # pylint: disable=too-many-locals,too-many-arguments,invalid-name
+    def run_load(
         self,
         destination_project_dataset_table: str,
         source_uris: List,
@@ -1784,7 +1783,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         # we check to make sure the passed source format is valid
         # if it's not, we raise a ValueError
         # Refer to this link for more details:
-        #   https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceFormat # noqa # pylint: disable=line-too-long
+        #   https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceFormat # noqa
 
         if schema_fields is None and not autodetect:
             raise ValueError('You must either pass a schema or autodetect=True.')
@@ -1922,7 +1921,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         self.running_job_id = job.job_id
         return job.job_id
 
-    def run_copy(  # pylint: disable=invalid-name
+    def run_copy(
         self,
         source_project_dataset_tables: Union[List, str],
         destination_project_dataset_table: str,
@@ -2090,7 +2089,6 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         self.running_job_id = job.job_id
         return job.job_id
 
-    # pylint: disable=too-many-locals,too-many-arguments, too-many-branches
     def run_query(
         self,
         sql: str,
@@ -2224,7 +2222,7 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         # BigQuery also allows you to define how you want a table's schema to change
         # as a side effect of a query job
         # for more details:
-        #   https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.schemaUpdateOptions  # noqa # pylint: disable=line-too-long
+        #   https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.schemaUpdateOptions  # noqa
 
         allowed_schema_update_options = ['ALLOW_FIELD_ADDITION', "ALLOW_FIELD_RELAXATION"]
 
@@ -2366,18 +2364,18 @@ class BigQueryConnection:
         self._args = args
         self._kwargs = kwargs
 
-    def close(self) -> None:  # noqa: D403
-        """BigQueryConnection does not have anything to close"""
+    def close(self) -> None:
+        """The BigQueryConnection does not have anything to close"""
 
-    def commit(self) -> None:  # noqa: D403
-        """BigQueryConnection does not support transactions"""
+    def commit(self) -> None:
+        """The BigQueryConnection does not support transactions"""
 
-    def cursor(self) -> "BigQueryCursor":  # noqa: D403
+    def cursor(self) -> "BigQueryCursor":
         """Return a new :py:class:`Cursor` object using the connection"""
         return BigQueryCursor(*self._args, **self._kwargs)
 
-    def rollback(self) -> NoReturn:  # noqa: D403
-        """BigQueryConnection does not have transactions"""
+    def rollback(self) -> NoReturn:
+        """The BigQueryConnection does not have transactions"""
         raise NotImplementedError("BigQueryConnection does not have transactions")
 
 
@@ -2659,7 +2657,7 @@ class BigQueryBaseCursor(LoggingMixin):
             DeprecationWarning,
             stacklevel=3,
         )
-        return self.hook.cancel_query(*args, **kwargs)  # type: ignore  # noqa
+        return self.hook.cancel_query(*args, **kwargs)  # type: ignore
 
     def run_with_configuration(self, *args, **kwargs) -> str:
         """
@@ -2807,7 +2805,6 @@ class BigQueryCursor(BigQueryBaseCursor):
 
     def fetchone(self) -> Union[List, None]:
         """Fetch the next row of a query result set"""
-        # pylint: disable=not-callable
         return self.next()
 
     def next(self) -> Union[List, None]:
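
[Editor's note] The ``protected-access`` guards around ``supported_jobs`` were needed because ``_JOB_TYPE`` is a private attribute of the google-cloud-bigquery job classes; the mapping picks the right class from a job configuration dict. A sketch of that dispatch (the configuration payload is made up):

    from google.cloud.bigquery import CopyJob, ExtractJob, LoadJob, QueryJob

    supported_jobs = {
        LoadJob._JOB_TYPE: LoadJob,
        CopyJob._JOB_TYPE: CopyJob,
        ExtractJob._JOB_TYPE: ExtractJob,
        QueryJob._JOB_TYPE: QueryJob,
    }
    configuration = {"query": {"query": "SELECT 1"}}
    # The first configuration key that matches a known job type wins.
    job_class = next(
        (cls for job_type, cls in supported_jobs.items() if job_type in configuration),
        None,
    )
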
diff --git a/airflow/providers/google/cloud/hooks/bigtable.py b/airflow/providers/google/cloud/hooks/bigtable.py
index 60e309d..148e5e4 100644
--- a/airflow/providers/google/cloud/hooks/bigtable.py
+++ b/airflow/providers/google/cloud/hooks/bigtable.py
@@ -38,7 +38,6 @@ class BigtableHook(GoogleBaseHook):
     keyword arguments rather than positional.
     """
 
-    # pylint: disable=too-many-arguments
     def __init__(
         self,
         gcp_conn_id: str = "google_cloud_default",
diff --git a/airflow/providers/google/cloud/hooks/cloud_build.py b/airflow/providers/google/cloud/hooks/cloud_build.py
index 07669d3..c228dfa 100644
--- a/airflow/providers/google/cloud/hooks/cloud_build.py
+++ b/airflow/providers/google/cloud/hooks/cloud_build.py
@@ -100,7 +100,7 @@ class CloudBuildHook(GoogleBaseHook):
 
         # Create build
         response = (
-            service.projects()  # pylint: disable=no-member
+            service.projects()
             .builds()
             .create(projectId=project_id, body=body)
             .execute(num_retries=self.num_retries)
@@ -114,7 +114,7 @@ class CloudBuildHook(GoogleBaseHook):
         build_id = response["metadata"]["build"]["id"]
 
         result = (
-            service.projects()  # pylint: disable=no-member
+            service.projects()
             .builds()
             .get(projectId=project_id, id=build_id)
             .execute(num_retries=self.num_retries)
@@ -136,10 +136,7 @@ class CloudBuildHook(GoogleBaseHook):
... 15151 lines suppressed ...

[airflow] 01/03: Add Python 3.9 support (#15515) (#16883)

Posted by jh...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jhtimmins pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 826b5c5b50dab18cab235c7c60a7528cc2fa8f4d
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Thu Jul 8 12:35:43 2021 +0200

    Add Python 3.9 support (#15515) (#16883)
    
    This includes several things:
    
    * added per-provider Python version support. Each provider
      can now declare the Python versions it does not support
    * excluded the ldap core extra from Python 3.9
    * skipped relevant tests on Python 3.9
    
    (cherry picked from commit ce44b628904e4f7480a2c208b5d5e087526408b6)
---
 BREEZE.rst                                         | 14 +++++-----
 CI.rst                                             |  4 +--
 CONTRIBUTING.rst                                   |  2 +-
 CONTRIBUTORS_QUICK_START.rst                       |  2 +-
 Dockerfile                                         |  7 +++--
 Dockerfile.ci                                      |  8 +++---
 IMAGES.rst                                         |  2 +-
 LOCAL_VIRTUALENV.rst                               |  6 ++---
 PULL_REQUEST_WORKFLOW.rst                          |  2 +-
 README.md                                          |  4 +--
 airflow/__init__.py                                |  3 ++-
 airflow/provider.yaml.schema.json                  |  7 +++++
 airflow/providers/apache/hive/provider.yaml        |  3 +++
 breeze                                             |  4 +--
 breeze-complete                                    |  2 +-
 .../PROVIDER_README_TEMPLATE.rst.jinja2            |  2 ++
 dev/provider_packages/SETUP_TEMPLATE.py.jinja2     |  8 +++---
 dev/provider_packages/prepare_provider_packages.py | 12 +++++++++
 dev/retag_docker_images.py                         |  2 +-
 scripts/ci/libraries/_build_images.sh              |  3 +--
 scripts/ci/libraries/_initialization.sh            |  4 +--
 scripts/ci/libraries/_push_pull_remove_images.sh   |  4 +--
 scripts/ci/selective_ci_checks.sh                  |  4 +--
 scripts/ci/tools/ci_fix_ownership.sh               |  2 +-
 scripts/ci/tools/prepare_prod_docker_images.sh     |  2 +-
 setup.cfg                                          |  3 ++-
 tests/bats/breeze/test_breeze_complete.bats        |  8 +++---
 tests/plugins/test_plugins_manager.py              | 28 ++++++++++++++-----
 tests/providers/apache/hive/hooks/test_hive.py     | 31 ++++++++++++++++++++++
 .../apache/hive/transfers/test_hive_to_mysql.py    |  9 +++++++
 .../apache/hive/transfers/test_hive_to_samba.py    |  9 +++++++
 .../apache/hive/transfers/test_mssql_to_hive.py    | 14 +++++++---
 .../apache/hive/transfers/test_mysql_to_hive.py    |  7 +++++
 .../log/elasticmock/fake_elasticsearch.py          |  2 +-
 tests/sensors/test_base.py                         |  1 +
 tests/sensors/test_smart_sensor_operator.py        |  2 ++
 36 files changed, 166 insertions(+), 61 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index 9b000b3..549addd 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -1263,7 +1263,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
           Uses different version of Airflow when building PROD image.
@@ -1493,7 +1493,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -I, --production-image
           Use production image for entering the environment and builds (not for tests).
@@ -1560,7 +1560,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -v, --verbose
           Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
@@ -1683,7 +1683,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
 
   ####################################################################################################
@@ -1878,7 +1878,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -b, --backend BACKEND
           Backend to use for tests - it determines which database is used.
@@ -1942,7 +1942,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   -F, --force-build-images
           Forces building of the local docker images. The images are rebuilt
@@ -2349,7 +2349,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
 
           One of:
 
-                 3.6 3.7 3.8
+                 3.6 3.7 3.8 3.9
 
   ****************************************************************************************************
    Choose backend to run for Airflow
diff --git a/CI.rst b/CI.rst
index 03c076c..260d570 100644
--- a/CI.rst
+++ b/CI.rst
@@ -57,7 +57,7 @@ Container Registry used as cache
 For our CI builds we are using the Container Registry to store results of the "Build Image" workflow
 and pass them to the "CI Build" workflow.
 
-Currently in main version of Airflow we run tests in 3 different versions of Python (3.6, 3.7, 3.8)
+Currently in the main version of Airflow we run tests on 4 different versions of Python (3.6, 3.7, 3.8, 3.9)
 which means that we have to build 8 images (4 CI ones and 4 PROD ones). Yet we run around 12 jobs
 with each of the CI images. That is a lot of time to just build the environment to run. Therefore
 we are utilising ``pull_request_target`` feature of GitHub Actions.
@@ -779,7 +779,7 @@ The image names follow the patterns:
 +--------------+----------------------------+--------------------------------+--------------------------------------------------------------------------------------------+
 
 * <BRANCH> might be either "main" or "v1-10-test" or "v2-*-test"
-* <X.Y> - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8"].
+* <X.Y> - Python version (Major + Minor). For "main" and "v2-*-test" should be in ["3.6", "3.7", "3.8", "3.9"].
 * <COMMIT_SHA> - for images that get merged to "main", "v2-*-test" or "v1-10-test", or built as part of a
   pull request, the images are tagged with the (full length) commit SHA of that particular branch. For pull
   requests the SHA used is the tip of the pull request branch.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c51161c..21cd85e 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -873,7 +873,7 @@ This can be done by running this (it utilizes parallel preparation of the constr
 
 .. code-block:: bash
 
-    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8"
+    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8 3.9"
     for python_version in $(echo "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}")
     do
       ./breeze build-image --upgrade-to-newer-dependencies --python ${python_version} --build-cache-local
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index 0b25b0c..d487b2d 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -119,7 +119,7 @@ Pyenv and setting up virtual-env
       libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \
       xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
 
-  $ sudo apt install build-essentials python3.6-dev python3.7-dev python3.8-dev python-dev openssl \
+  $ sudo apt install build-essential python3.6-dev python3.7-dev python3.8-dev python3.9-dev python-dev openssl \
        sqlite sqlite-dev default-libmysqlclient-dev libmysqld-dev postgresql
 
 2. Install pyenv
diff --git a/Dockerfile b/Dockerfile
index 39a13dc..6a4b75d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -44,7 +44,7 @@ ARG AIRFLOW_GID="50000"
 
 ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
 
-ARG AIRFLOW_PIP_VERSION=21.1.1
+ARG AIRFLOW_PIP_VERSION=21.1.2
 
 # By default PIP has progress bar but you can disable it.
 ARG PIP_PROGRESS_BAR="on"
@@ -232,11 +232,10 @@ ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
 ARG INSTALL_FROM_PYPI="true"
 # Those are additional constraints that are needed for some extras but we do not want to
 # force them on the main Airflow package.
-# * chardet<4 and certifi<2021.0.0 required to keep snowflake happy
-# * urllib3 - required to keep boto3 happy
+# * certifi<2021.0.0 required to keep snowflake happy
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
 ARG CONTINUE_ON_PIP_CHECK_FAILURE="false"
 
 
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 29438c7..7b1dbf1 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -207,7 +207,7 @@ ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
 # By default in the image, we are installing all providers when installing from sources
 ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
 ARG INSTALL_FROM_PYPI="true"
-ARG AIRFLOW_PIP_VERSION=21.1.1
+ARG AIRFLOW_PIP_VERSION=21.1.2
 # Setup PIP
 # By default PIP install run without cache to make image smaller
 ARG PIP_NO_CACHE_DIR="true"
@@ -251,13 +251,11 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\
 
 # Those are additional constraints that are needed for some extras but we do not want to
 # force them on the main Airflow package. Those limitations are:
-# * chardet<4 and certifi<2021.0.0: required by snowflake provider
+# * certifi<2021.0.0: required by snowflake provider
 # * lazy-object-proxy<1.5.0: required by astroid
-# * pyOpenSSL: required by snowflake provider https://github.com/snowflakedb/snowflake-connector-python/blob/v2.3.6/setup.py#L201
-# * urllib3<1.26: Required to keep boto3 happy
 # * pyjwt<2.0.0: flask-jwt-extended requires it
 # * dill<0.3.3 required by apache-beam
-ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 lazy-object-proxy<1.5.0 pyOpenSSL<20.0.0 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
+ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="lazy-object-proxy<1.5.0 pyjwt<2.0.0 dill<0.3.3 certifi<2021.0.0"
 ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
 ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \
     UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
diff --git a/IMAGES.rst b/IMAGES.rst
index e38257f..c3a1805 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -81,7 +81,7 @@ where:
   built from branches so they change over time. The ``2.*.*`` labels are built from git tags
   and they are "fixed" once built.
 * ``PYTHON_MAJOR_MINOR_VERSION`` - version of Python used to build the image. Examples: ``3.6``, ``3.7``,
-  ``3.8``
+  ``3.8``, ``3.9``
 * The ``-ci`` suffix is added for CI images
 * The ``-manifest`` is added for manifest images (see below for explanation of manifest images)
 
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index f97f89a..b389442 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -51,8 +51,8 @@ Required Software Packages
 Use system-level package managers like yum, apt-get for Linux, or
 Homebrew for macOS to install required software packages:
 
-* Python (One of: 3.6, 3.7, 3.8)
-* MySQL
+* Python (One of: 3.6, 3.7, 3.8, 3.9)
+* MySQL 5.7+
 * libxml
 
 Refer to the `Dockerfile.ci <Dockerfile.ci>`__ for a comprehensive list
@@ -102,7 +102,7 @@ Creating a Local virtualenv
 
 To use your IDE for Airflow development and testing, you need to configure a virtual
 environment. Ideally you should set up virtualenv for all Python versions that Airflow
-supports (3.6, 3.7, 3.8).
+supports (3.6, 3.7, 3.8, 3.9).
 
 To create and initialize the local virtualenv:
 
diff --git a/PULL_REQUEST_WORKFLOW.rst b/PULL_REQUEST_WORKFLOW.rst
index 96cc5b3..3e3cce6 100644
--- a/PULL_REQUEST_WORKFLOW.rst
+++ b/PULL_REQUEST_WORKFLOW.rst
@@ -58,7 +58,7 @@ We approached the problem by:
 3) Even more optimisation came from limiting the scope of tests to only "default" matrix parameters. So far
    in Airflow we always run all tests for all matrix combinations. The primary matrix components are:
 
-   * Python versions (currently 3.6, 3.7, 3.8)
+   * Python versions (currently 3.6, 3.7, 3.8, 3.9)
    * Backend types (currently MySQL/Postgres)
   * Backend versions (currently MySQL 5.7, MySQL 8, Postgres 9.6, Postgres 13)
 
diff --git a/README.md b/README.md
index ef77abf..b6614c2 100644
--- a/README.md
+++ b/README.md
@@ -78,9 +78,9 @@ Airflow is not a streaming solution, but it is often used to process real-time d
 
 Apache Airflow is tested with:
 
-|                      | Main version (dev)        | Stable version (2.0.2)   |
+|                      | Main version (dev)        | Stable version (2.1.0)   |
 | -------------------- | ------------------------- | ------------------------ |
-| Python               | 3.6, 3.7, 3.8             | 3.6, 3.7, 3.8            |
+| Python               | 3.6, 3.7, 3.8, 3.9        | 3.6, 3.7, 3.8            |
 | Kubernetes           | 1.20, 1.19, 1.18          | 1.20, 1.19, 1.18         |
 | PostgreSQL           | 9.6, 10, 11, 12, 13       | 9.6, 10, 11, 12, 13      |
 | MySQL                | 5.7, 8                    | 5.7, 8                   |
diff --git a/airflow/__init__.py b/airflow/__init__.py
index 7ecc487..6d04886 100644
--- a/airflow/__init__.py
+++ b/airflow/__init__.py
@@ -36,7 +36,7 @@ from airflow import version
 
 __version__ = version.version
 
-__all__ = ['__version__', 'login', 'DAG']
+__all__ = ['__version__', 'login', 'DAG', 'PY36', 'PY37', 'PY38', 'PY39']
 
 # Make `airflow` a namespace package, supporting installing
 # airflow.providers.* in different locations (i.e. one in site, and one in user
@@ -50,6 +50,7 @@ login: Optional[Callable] = None
 PY36 = sys.version_info >= (3, 6)
 PY37 = sys.version_info >= (3, 7)
 PY38 = sys.version_info >= (3, 8)
+PY39 = sys.version_info >= (3, 9)
 
 
 def __getattr__(name):
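
[Editor's note] The new ``PY39`` flag mirrors the existing PY36-PY38 constants and is what the "skipped relevant tests on Python 3.9" bullet relies on. A hedged sketch of such a skip; the test name and reason string are invented, not taken from this commit:

    import pytest
    from airflow import PY39

    @pytest.mark.skipif(PY39, reason="The Apache Hive provider does not support Python 3.9")
    def test_hive_cli_hook_runs():
        ...
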
diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json
index b8a2c13..6891206 100644
--- a/airflow/provider.yaml.schema.json
+++ b/airflow/provider.yaml.schema.json
@@ -28,6 +28,13 @@
         "type": "string"
       }
     },
+    "excluded-python-versions": {
+      "description": "List of python versions excluded for that provider",
+      "type": "array",
+      "items": {
+          "type": "string"
+      }
+    },
     "integrations": {
       "description": "List of integrations supported by the provider.",
       "type": "array",
diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml
index 803b6ec..9d02184 100644
--- a/airflow/providers/apache/hive/provider.yaml
+++ b/airflow/providers/apache/hive/provider.yaml
@@ -31,6 +31,9 @@ versions:
 additional-dependencies:
   - apache-airflow>=2.1.0
 
+excluded-python-versions:
+  - "3.9"
+
 integrations:
   - integration-name: Apache Hive
     external-doc-url: https://hive.apache.org/
diff --git a/breeze b/breeze
index 48021f9..aac92e0 100755
--- a/breeze
+++ b/breeze
@@ -3507,7 +3507,7 @@ function breeze::run_breeze_command() {
         docker_engine_resources::check_all_resources
         if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
-            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"
+            ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"  || true
         else
             ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
         fi
@@ -3621,7 +3621,7 @@ function breeze::run_breeze_command() {
 # We have different versions of images depending on the python version used. We keep up with the
 # latest patch-level changes in Python (this is done automatically during CI builds) so we have
 # to only take into account the MAJOR and MINOR version of Python. This variable keeps the major/minor
-# version of python in X.Y format (3.6, 3.7, 3.8 etc).
+# version of Python in X.Y format (3.6, 3.7, 3.8, 3.9).
 #
 # In Breeze the precedence of setting the version is as follows:
 #      1. --python flag (if set, it will explicitly override it in the next step)
diff --git a/breeze-complete b/breeze-complete
index aac5076..d445194e 100644
--- a/breeze-complete
+++ b/breeze-complete
@@ -23,7 +23,7 @@
 # by the BATS tests automatically during pre-commit and CI
 # Those cannot be made read-only as the breeze-complete must be re-sourceable
 
-_breeze_allowed_python_major_minor_versions="3.6 3.7 3.8"
+_breeze_allowed_python_major_minor_versions="3.6 3.7 3.8 3.9"
 _breeze_allowed_backends="sqlite mysql postgres"
 _breeze_allowed_integrations="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino all"
 _breeze_allowed_generate_constraints_modes="source-providers pypi-providers no-providers"
diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
index c9837b1..cb65758 100644
--- a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
+++ b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
@@ -47,6 +47,8 @@ Installation
 You can install this package on top of an existing Airflow 2.1+ installation via
 ``pip install {{PACKAGE_PIP_NAME}}``
 
+The package supports the following Python versions: {{ ", ".join(SUPPORTED_PYTHON_VERSIONS) }}
+
 {%- if PIP_REQUIREMENTS %}
 
 PIP requirements
diff --git a/dev/provider_packages/SETUP_TEMPLATE.py.jinja2 b/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
index cfe82e2..69dcdac 100644
--- a/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
+++ b/dev/provider_packages/SETUP_TEMPLATE.py.jinja2
@@ -68,16 +68,16 @@ def do_setup():
             'Intended Audience :: Developers',
             'Intended Audience :: System Administrators',
             'License :: OSI Approved :: Apache Software License',
-            'Programming Language :: Python :: 3.6',
-            'Programming Language :: Python :: 3.7',
-            'Programming Language :: Python :: 3.8',
+{%- for python_version in SUPPORTED_PYTHON_VERSIONS %}
+            'Programming Language :: Python :: {{ python_version }}',
+{%- endfor %}
             'Topic :: System :: Monitoring',
         ],
         author='Apache Software Foundation',
         author_email='dev@airflow.apache.org',
         url='https://airflow.apache.org/',
         download_url='https://archive.apache.org/dist/airflow/{{ PROVIDERS_FOLDER }}',
-        python_requires='~=3.6',
+        python_requires='{{ PYTHON_REQUIRES }}',
         project_urls={
             'Documentation': 'https://airflow.apache.org/docs/{{ PACKAGE_PIP_NAME }}/{{RELEASE}}/',
             'Bug Tracker': 'https://github.com/apache/airflow/issues',
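For a provider that excludes Python 3.9 (such as apache.hive above), this
template would render roughly to the following setup.py fragment, given the
SUPPORTED_PYTHON_VERSIONS and PYTHON_REQUIRES values computed in
prepare_provider_packages.py below (a sketch of the expected output, not a
verbatim rendering):

    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
    'Programming Language :: Python :: 3.8',
    ...
    python_requires='~=3.6, !=3.9',
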
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index c27c88b..49dc393 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -51,6 +51,8 @@ from rich.console import Console
 from rich.progress import Progress
 from rich.syntax import Syntax
 
+ALL_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+
 try:
     from yaml import CSafeLoader as SafeLoader
 except ImportError:
@@ -208,6 +210,7 @@ class ProviderPackageDetails(NamedTuple):
     documentation_provider_package_path: str
     provider_description: str
     versions: List[str]
+    excluded_python_versions: List[str]
 
 
 ENTITY_NAMES = {
@@ -1445,6 +1448,7 @@ def get_provider_details(provider_package_id: str) -> ProviderPackageDetails:
         documentation_provider_package_path=get_documentation_package_path(provider_package_id),
         provider_description=provider_info['description'],
         versions=provider_info['versions'],
+        excluded_python_versions=provider_info.get("excluded-python-versions") or [],
     )
 
 
@@ -1483,6 +1487,12 @@ def get_provider_jinja_context(
     )
     with open(changelog_path) as changelog_file:
         changelog = changelog_file.read()
+    supported_python_versions = [
+        p for p in ALL_PYTHON_VERSIONS if p not in provider_details.excluded_python_versions
+    ]
+    python_requires = "~=3.6"
+    for p in provider_details.excluded_python_versions:
+        python_requires += f", !={p}"
     context: Dict[str, Any] = {
         "ENTITY_TYPES": list(EntityType),
         "README_FILE": "README.rst",
@@ -1517,6 +1527,8 @@ def get_provider_jinja_context(
             provider_details.documentation_provider_package_path,
         ),
         "CHANGELOG": changelog,
+        "SUPPORTED_PYTHON_VERSIONS": supported_python_versions,
+        "PYTHON_REQUIRES": python_requires,
     }
     return context
 
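As a worked example: for a provider whose provider.yaml lists "3.9" under
excluded-python-versions, the values passed into the Jinja context would be

    supported_python_versions = ["3.6", "3.7", "3.8"]
    python_requires = "~=3.6, !=3.9"

while a provider with no exclusions keeps python_requires at "~=3.6" and
lists all four supported versions.
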
diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py
index 2e83475..91e8dc4 100755
--- a/dev/retag_docker_images.py
+++ b/dev/retag_docker_images.py
@@ -47,7 +47,7 @@ from typing import List
 
 import click
 
-PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
 
 DOCKERHUB_IMAGES = [
     "{prefix}:python{python_version}-{branch}",
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index 099cf85..598d944 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -495,7 +495,6 @@ function build_images::rebuild_ci_image_if_needed() {
         push_pull_remove_images::pull_ci_images_if_needed
         return
     fi
-
     local needs_docker_build="false"
     md5sum::check_if_docker_build_is_needed
     build_images::get_local_build_cache_hash
@@ -526,7 +525,7 @@ function build_images::rebuild_ci_image_if_needed() {
                 local root_files_count
                 root_files_count=$(find "airflow" "tests" -user root | wc -l | xargs)
                 if [[ ${root_files_count} != "0" ]]; then
-                    ./scripts/ci/tools/ci_fix_ownership.sh
+                    ./scripts/ci/tools/ci_fix_ownership.sh || true
                 fi
             fi
             verbosity::print_info
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index e8b3c91..0bc9dc1 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -87,11 +87,11 @@ function initialization::initialize_base_variables() {
     export PRODUCTION_IMAGE="false"
 
     # All supported major/minor versions of python in all versions of Airflow
-    ALL_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8")
+    ALL_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8" "3.9")
     export ALL_PYTHON_MAJOR_MINOR_VERSIONS
 
     # Currently supported major/minor versions of python
-    CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8")
+    CURRENT_PYTHON_MAJOR_MINOR_VERSIONS+=("3.6" "3.7" "3.8" "3.9")
     export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS
 
     # Currently supported versions of Postgres
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index d7bc17a..a6a1c5c 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -144,7 +144,7 @@ function push_pull_remove_images::pull_base_python_image() {
         push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PYTHON_BASE_IMAGE}" \
             "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${python_tag_suffix}"
     else
-        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}"
+        docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}" || true
     fi
 }
 
@@ -161,7 +161,7 @@ function push_pull_remove_images::pull_ci_images_if_needed() {
             push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" \
                 "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}"
         else
-            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}"
+            push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}" || true
         fi
     fi
 }
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index 914dfaa..e7a9144 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -116,8 +116,8 @@ function output_all_basic_variables() {
 
     if [[ ${FULL_TESTS_NEEDED_LABEL} == "true" ]]; then
         initialization::ga_output postgres-exclude '[{ "python-version": "3.6" }]'
-        initialization::ga_output mysql-exclude '[{ "python-version": "3.7" }]'
-        initialization::ga_output sqlite-exclude '[{ "python-version": "3.8" }]'
+        initialization::ga_output mysql-exclude '[{ "python-version": "3.7" }, { "python-version": "3.9" }]'
+        initialization::ga_output sqlite-exclude '[{ "python-version": "3.7" }, { "python-version": "3.8" }]'
     else
         initialization::ga_output postgres-exclude '[]'
         initialization::ga_output mysql-exclude '[]'
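
With the FULL_TESTS_NEEDED_LABEL exclusions above, and assuming the matrix
otherwise spans all four Python versions for each backend, the effective
coverage works out to:

    postgres: 3.7, 3.8, 3.9   (3.6 excluded)
    mysql:    3.6, 3.8        (3.7, 3.9 excluded)
    sqlite:   3.6, 3.9        (3.7, 3.8 excluded)

so every Python version is still exercised against at least one backend
without running the full cross-product.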
diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/ci_fix_ownership.sh
index 56463d2..6ed1161 100755
--- a/scripts/ci/tools/ci_fix_ownership.sh
+++ b/scripts/ci/tools/ci_fix_ownership.sh
@@ -37,4 +37,4 @@ docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \
     --rm \
     --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \
     "${AIRFLOW_CI_IMAGE}" \
-    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh
+    -c /opt/airflow/scripts/in_container/run_fix_ownership.sh || true
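The "|| true" suffixes added here and in the library scripts above make these
helper invocations best-effort: a failure to fix file ownership or to pull a
cached image should not fail the surrounding CI job. The equivalent pattern in
Python (an illustrative analogy, not part of this change) is a subprocess run
with check=False:

    import subprocess

    # Best-effort cleanup: ignore a non-zero exit code, like `cmd || true` in shell.
    subprocess.run(["./scripts/ci/tools/ci_fix_ownership.sh"], check=False)
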
diff --git a/scripts/ci/tools/prepare_prod_docker_images.sh b/scripts/ci/tools/prepare_prod_docker_images.sh
index bd3436f..ad6cc95 100755
--- a/scripts/ci/tools/prepare_prod_docker_images.sh
+++ b/scripts/ci/tools/prepare_prod_docker_images.sh
@@ -38,7 +38,7 @@ fi
 
 export INSTALL_AIRFLOW_VERSION="${1}"
 
-for python_version in "3.6" "3.7" "3.8"
+for python_version in "3.6" "3.7" "3.8" "3.9"
 do
   export PYTHON_MAJOR_MINOR_VERSION=${python_version}
   "${AIRFLOW_SOURCES_DIR}/scripts/ci/images/ci_build_dockerhub.sh"
diff --git a/setup.cfg b/setup.cfg
index 46bf15b..8b03296 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -56,6 +56,7 @@ classifiers =
     Programming Language :: Python :: 3.6
     Programming Language :: Python :: 3.7
     Programming Language :: Python :: 3.8
+    Programming Language :: Python :: 3.9
     Topic :: System :: Monitoring
 project_urls =
     Documentation=https://airflow.apache.org/docs/
@@ -106,7 +107,7 @@ install_requires =
     graphviz>=0.12
     gunicorn>=19.5.0
     httpx
-    importlib_metadata~=1.7;python_version<"3.9" # We could work with 3.1, but argparse needs <2
+    importlib_metadata>=1.7;python_version<"3.9"
     importlib_resources~=1.4
     # Required by vendored-in connexion
     inflection>=0.3.1
diff --git a/tests/bats/breeze/test_breeze_complete.bats b/tests/bats/breeze/test_breeze_complete.bats
index c1dfed1..3638b4d 100644
--- a/tests/bats/breeze/test_breeze_complete.bats
+++ b/tests/bats/breeze/test_breeze_complete.bats
@@ -25,7 +25,7 @@
   source "${AIRFLOW_SOURCES}/breeze-complete"
 
   breeze_complete::get_known_values_breeze "-p"
-  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8"
+  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test get_known_values long" {
@@ -34,7 +34,7 @@
   source "${AIRFLOW_SOURCES}/breeze-complete"
 
   breeze_complete::get_known_values_breeze "--python"
-  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8"
+  assert_equal "${_breeze_known_values}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test wrong get_known_values" {
@@ -125,7 +125,7 @@
   COMP_WORDS=("--python" "")
   breeze_complete::_comp_breeze
 
-  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8"
+  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test autocomplete --python with prefix" {
@@ -136,7 +136,7 @@
   COMP_WORDS=("--python" "3")
   breeze_complete::_comp_breeze
 
-  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8"
+  assert_equal "${COMPREPLY[*]}" "3.6 3.7 3.8 3.9"
 }
 
 @test "Test autocomplete build-" {
diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py
index 778cf08..23640a8 100644
--- a/tests/plugins/test_plugins_manager.py
+++ b/tests/plugins/test_plugins_manager.py
@@ -30,8 +30,24 @@ from airflow.www import app as application
 from tests.test_utils.config import conf_vars
 from tests.test_utils.mock_plugins import mock_plugin_manager
 
-py39 = sys.version_info >= (3, 9)
-importlib_metadata = 'importlib.metadata' if py39 else 'importlib_metadata'
+importlib_metadata_string = 'importlib_metadata'
+
+try:
+    import importlib_metadata  # pylint: disable=unused-import
+
+    # If importlib_metadata is installed, it takes precedence over built-in importlib.metadata in PY39
+    # so we should use the default declared above
+except ImportError:
+    try:
+        import importlib.metadata  # pylint: disable=unused-import
+
+        # Only when the importlib_metadata backport is absent is the stdlib importlib.metadata actually used.
+        importlib_metadata_string = 'importlib.metadata'
+    except ImportError:
+        raise Exception(
+            "Either importlib_metadata must be installed or importlib.metadata must be"
+            " available in system libraries (Python 3.9+). We seem to have neither."
+        )
 
 ON_LOAD_EXCEPTION_PLUGIN = """
 from airflow.plugins_manager import AirflowPlugin
@@ -283,9 +299,9 @@ class TestPluginsManager:
         mock_entrypoint.load.side_effect = ImportError('my_fake_module not found')
         mock_dist.entry_points = [mock_entrypoint]
 
-        with mock.patch(f'{importlib_metadata}.distributions', return_value=[mock_dist]), caplog.at_level(
-            logging.ERROR, logger='airflow.plugins_manager'
-        ):
+        with mock.patch(
+            f'{importlib_metadata_string}.distributions', return_value=[mock_dist]
+        ), caplog.at_level(logging.ERROR, logger='airflow.plugins_manager'):
             load_entrypoint_plugins()
 
             received_logs = caplog.text
@@ -358,7 +374,7 @@ class TestEntryPointSource:
         mock_dist.version = '1.0.0'
         mock_dist.entry_points = [mock_entrypoint]
 
-        with mock.patch(f'{importlib_metadata}.distributions', return_value=[mock_dist]):
+        with mock.patch(f'{importlib_metadata_string}.distributions', return_value=[mock_dist]):
             plugins_manager.load_entrypoint_plugins()
 
         source = plugins_manager.EntryPointSource(mock_entrypoint, mock_dist)
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 179dbcc..94ff8b7 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -28,6 +28,7 @@ import pandas as pd
 import pytest
 from hmsclient import HMSClient
 
+from airflow import PY39
 from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection
 from airflow.models.dag import DAG
@@ -44,6 +45,12 @@ DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveEnvironment(unittest.TestCase):
     def setUp(self):
         self.next_day = (DEFAULT_DATE + datetime.timedelta(days=1)).isoformat()[:10]
@@ -58,6 +65,12 @@ class TestHiveEnvironment(unittest.TestCase):
             self.hook = HiveMetastoreHook()
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveCliHook(unittest.TestCase):
     @mock.patch('tempfile.tempdir', '/tmp/')
     @mock.patch('tempfile._RandomNameSequence.__next__')
@@ -324,6 +337,12 @@ class TestHiveCliHook(unittest.TestCase):
         assert_equal_ignore_multiple_spaces(self, mock_run_cli.call_args_list[0][0][0], query)
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveMetastoreHook(TestHiveEnvironment):
     VALID_FILTER_MAP = {'key2': 'value2'}
 
@@ -549,6 +568,12 @@ class TestHiveMetastoreHook(TestHiveEnvironment):
         assert metastore_mock.drop_partition(self.table, db=self.database, part_vals=[DEFAULT_DATE_DS]), ret
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveServer2Hook(unittest.TestCase):
     def _upload_dataframe(self):
         df = pd.DataFrame({'a': [1, 2], 'b': [1, 2]})
@@ -797,6 +822,12 @@ class TestHiveServer2Hook(unittest.TestCase):
         assert 'test_dag_run_id' in output
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveCli(unittest.TestCase):
     def setUp(self):
         self.nondefault_schema = "nondefault"
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
index c1fddd2..e85595f 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
@@ -20,6 +20,9 @@ import re
 import unittest
 from unittest.mock import MagicMock, patch
 
+import pytest
+
+from airflow import PY39
 from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
 from airflow.utils import timezone
 from airflow.utils.operator_helpers import context_to_airflow_vars
@@ -29,6 +32,12 @@ from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockMySqlHook
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHiveToMySqlTransfer(TestHiveEnvironment):
     def setUp(self):
         self.kwargs = dict(
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_samba.py b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
index 26c3329..c2a7cde 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_samba.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
@@ -19,12 +19,21 @@ import os
 import unittest
 from unittest.mock import MagicMock, Mock, PropertyMock, patch
 
+import pytest
+
+from airflow import PY39
 from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
 from airflow.utils.operator_helpers import context_to_airflow_vars
 from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
 from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockSambaHook
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 class TestHive2SambaOperator(TestHiveEnvironment):
     def setUp(self):
         self.kwargs = dict(
diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
index 99455da..881ea13 100644
--- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
@@ -21,7 +21,9 @@ import unittest
 from collections import OrderedDict
 from unittest.mock import Mock, PropertyMock, patch
 
-from airflow import PY38
+import pytest
+
+from airflow import PY38, PY39
 
 if PY38:
     MsSqlToHiveTransferOperator: None = None
@@ -34,8 +36,14 @@ except ImportError:
     pymssql = None
 
 
-@unittest.skipIf(PY38, "Mssql package not available when Python >= 3.8.")
-@unittest.skipIf(pymssql is None, 'pymssql package not present')
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
+@pytest.mark.skipif(PY38, reason="Mssql package not available when Python >= 3.8.")
+@pytest.mark.skipif(pymssql is None, reason='pymssql package not present')
 class TestMsSqlToHiveTransfer(unittest.TestCase):
     def setUp(self):
         self.kwargs = dict(sql='sql', hive_table='table', task_id='test_mssql_to_hive', dag=None)
diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
index 1b7e2cd..0bc5996 100644
--- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
@@ -24,6 +24,7 @@ from unittest import mock
 
 import pytest
 
+from airflow import PY39
 from airflow.models.dag import DAG
 from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
@@ -58,6 +59,12 @@ class HiveopTempDir:
         return tail.startswith("airflow_hiveop_")
 
 
+@pytest.mark.skipif(
+    PY39,
+    reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
+    " This could be removed when https://github.com/dropbox/PyHive/issues/380"
+    " is solved",
+)
 @pytest.mark.backend("mysql")
 class TestTransfer(unittest.TestCase):
     def setUp(self):
diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py
index 4d6902a..cf81a1a 100644
--- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py
+++ b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py
@@ -47,7 +47,7 @@ from elasticsearch.exceptions import NotFoundError
 from .utilities import get_random_id
 
 
-# pylint: disable=redefined-builtin
+# pylint: disable=redefined-builtin,unused-argument
 class FakeElasticsearch(Elasticsearch):
     __documents_dict = None
 
diff --git a/tests/sensors/test_base.py b/tests/sensors/test_base.py
index d477011..b148e80 100644
--- a/tests/sensors/test_base.py
+++ b/tests/sensors/test_base.py
@@ -15,6 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# pylint: disable=no-member
 
 import unittest
 from datetime import timedelta
diff --git a/tests/sensors/test_smart_sensor_operator.py b/tests/sensors/test_smart_sensor_operator.py
index 9ea0c00..0e41896 100644
--- a/tests/sensors/test_smart_sensor_operator.py
+++ b/tests/sensors/test_smart_sensor_operator.py
@@ -15,6 +15,8 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+# pylint: disable=no-member
+
 
 import datetime
 import logging