Posted to commits@airflow.apache.org by ur...@apache.org on 2022/10/24 05:15:28 UTC
[airflow] branch main updated: Enable string normalization in python formatting (other) (#27206)
This is an automated email from the ASF dual-hosted git repository.
uranusjr pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new bfcae349b8 Enable string normalization in python formatting (other) (#27206)
bfcae349b8 is described below
commit bfcae349b88fd959e32bfacd027a5be976fe2132
Author: Daniel Standish <15...@users.noreply.github.com>
AuthorDate: Sun Oct 23 22:15:20 2022 -0700
Enable string normalization in python formatting (other) (#27206)
---
.pre-commit-config.yaml | 8 +
STATIC_CODE_CHECKS.rst | 1 +
dev/airflow-github | 62 +-
dev/airflow-license | 22 +-
dev/assign_cherry_picked_prs_with_milestone.py | 10 +-
dev/breeze/README.md | 2 +-
dev/breeze/pyproject.toml | 1 -
dev/breeze/src/airflow_breeze/breeze.py | 2 +-
.../src/airflow_breeze/commands/ci_commands.py | 58 +-
.../airflow_breeze/commands/ci_image_commands.py | 32 +-
.../airflow_breeze/commands/developer_commands.py | 86 +-
.../airflow_breeze/commands/kubernetes_commands.py | 184 ++---
.../src/airflow_breeze/commands/main_command.py | 90 +--
.../commands/production_image_commands.py | 68 +-
.../commands/release_management_commands.py | 50 +-
.../src/airflow_breeze/commands/setup_commands.py | 130 +--
.../airflow_breeze/commands/testing_commands.py | 66 +-
dev/breeze/src/airflow_breeze/global_constants.py | 128 +--
.../src/airflow_breeze/params/build_ci_params.py | 2 +-
.../src/airflow_breeze/params/build_prod_params.py | 32 +-
.../airflow_breeze/params/common_build_params.py | 22 +-
.../src/airflow_breeze/params/doc_build_params.py | 2 +-
.../src/airflow_breeze/params/shell_params.py | 50 +-
dev/breeze/src/airflow_breeze/pre_commit_ids.py | 182 ++---
dev/breeze/src/airflow_breeze/utils/cache.py | 8 +-
dev/breeze/src/airflow_breeze/utils/ci_group.py | 2 +-
.../src/airflow_breeze/utils/common_options.py | 350 ++++----
dev/breeze/src/airflow_breeze/utils/confirm.py | 12 +-
dev/breeze/src/airflow_breeze/utils/console.py | 2 +-
.../airflow_breeze/utils/docker_command_utils.py | 140 ++--
.../utils/find_newer_dependencies.py | 6 +-
.../src/airflow_breeze/utils/github_actions.py | 2 +-
.../src/airflow_breeze/utils/host_info_utils.py | 12 +-
dev/breeze/src/airflow_breeze/utils/image.py | 2 +-
.../src/airflow_breeze/utils/kubernetes_utils.py | 54 +-
.../src/airflow_breeze/utils/md5_build_check.py | 14 +-
dev/breeze/src/airflow_breeze/utils/parallel.py | 54 +-
dev/breeze/src/airflow_breeze/utils/path_utils.py | 32 +-
dev/breeze/src/airflow_breeze/utils/recording.py | 8 +-
dev/breeze/src/airflow_breeze/utils/registry.py | 14 +-
dev/breeze/src/airflow_breeze/utils/reinstall.py | 2 +-
dev/breeze/src/airflow_breeze/utils/run_tests.py | 8 +-
dev/breeze/src/airflow_breeze/utils/run_utils.py | 46 +-
.../src/airflow_breeze/utils/selective_checks.py | 10 +-
dev/breeze/tests/test_cache.py | 20 +-
dev/breeze/tests/test_docker_command_utils.py | 70 +-
dev/breeze/tests/test_find_airflow_directory.py | 8 +-
dev/breeze/tests/test_host_info_utils.py | 2 +-
dev/breeze/tests/test_pr_info.py | 30 +-
dev/breeze/tests/test_run_utils.py | 8 +-
dev/breeze/tests/test_selective_checks.py | 4 +-
dev/chart/build_changelog_annotations.py | 6 +-
dev/check_files.py | 48 +-
dev/deprecations/generate_deprecated_dicts.py | 172 ++--
dev/example_dags/update_example_dags_paths.py | 10 +-
dev/prepare_bulk_issues.py | 4 +-
dev/prepare_release_issue.py | 30 +-
dev/provider_packages/prepare_provider_packages.py | 142 ++--
dev/provider_packages/remove_old_releases.py | 14 +-
dev/send_email.py | 4 +-
...calculate_statistics_provider_testing_issues.py | 4 +-
dev/stats/get_important_pr_candidates.py | 76 +-
dev/system_tests/update_issue_status.py | 16 +-
dev/validate_version_added_fields_in_config.py | 20 +-
docker_tests/docker_tests_utils.py | 2 +-
docker_tests/test_ci_image.py | 2 +-
docker_tests/test_docker_compose_quick_start.py | 10 +-
.../test_examples_of_prod_image_building.py | 10 +-
docker_tests/test_prod_image.py | 82 +-
docs/build_docs.py | 62 +-
docs/conf.py | 506 ++++++------
.../extending/embedding-dags/test_dag.py | 12 +-
docs/exts/airflow_intersphinx.py | 46 +-
docs/exts/docs_build/code_utils.py | 2 +-
docs/exts/docs_build/dev_index_generator.py | 10 +-
docs/exts/docs_build/docs_builder.py | 22 +-
docs/exts/docs_build/errors.py | 4 +-
docs/exts/docs_build/fetch_inventories.py | 20 +-
docs/exts/docs_build/github_action_utils.py | 2 +-
docs/exts/docs_build/lint_checks.py | 2 +-
docs/exts/docs_build/spelling_checks.py | 10 +-
docs/exts/docs_build/third_party_inventories.py | 68 +-
docs/exts/exampleinclude.py | 4 +-
docs/exts/extra_files_with_substitutions.py | 10 +-
docs/exts/operators_and_hooks_ref.py | 92 +--
docs/exts/provider_yaml_utils.py | 6 +-
docs/exts/providers_packages_ref.py | 8 +-
docs/exts/redirects.py | 2 +-
docs/exts/sphinx_script_update.py | 12 +-
docs/exts/substitution_extensions.py | 24 +-
docs/publish_docs.py | 24 +-
docs/rtd-deprecation/conf.py | 2 +-
docs/spelling_wordlist.txt | 1 +
kubernetes_tests/test_base.py | 66 +-
kubernetes_tests/test_kubernetes_executor.py | 24 +-
kubernetes_tests/test_kubernetes_pod_operator.py | 449 +++++------
.../test_kubernetes_pod_operator_backcompat.py | 273 ++++---
kubernetes_tests/test_other_executors.py | 24 +-
metastore_browser/hive_metastore.py | 50 +-
scripts/ci/pre_commit/common_precommit_utils.py | 2 +-
scripts/ci/pre_commit/pre_commit_boring_cyborg.py | 6 +-
.../ci/pre_commit/pre_commit_breeze_cmd_line.py | 18 +-
.../pre_commit_build_providers_dependencies.py | 10 +-
scripts/ci/pre_commit/pre_commit_chart_schema.py | 12 +-
.../pre_commit_check_2_2_compatibility.py | 2 +-
.../pre_commit/pre_commit_check_init_in_tests.py | 2 +-
.../ci/pre_commit/pre_commit_check_lazy_logging.py | 4 +-
.../pre_commit_check_order_dockerfile_extras.py | 12 +-
.../ci/pre_commit/pre_commit_check_order_setup.py | 30 +-
.../pre_commit_check_pre_commit_hooks.py | 32 +-
.../pre_commit_check_provider_yaml_files.py | 60 +-
.../pre_commit_check_setup_extra_packages_ref.py | 16 +-
.../ci/pre_commit/pre_commit_check_system_tests.py | 2 +-
...re_commit_check_system_tests_hidden_in_index.py | 2 +-
.../pre_commit_checkout_no_credentials.py | 12 +-
.../ci/pre_commit/pre_commit_compile_www_assets.py | 10 +-
.../pre_commit_compile_www_assets_dev.py | 8 +-
.../pre_commit/pre_commit_docstring_param_type.py | 8 +-
scripts/ci/pre_commit/pre_commit_flake8.py | 10 +-
scripts/ci/pre_commit/pre_commit_helm_lint.py | 8 +-
.../pre_commit_inline_scripts_in_docker.py | 12 +-
scripts/ci/pre_commit/pre_commit_insert_extras.py | 18 +-
scripts/ci/pre_commit/pre_commit_json_schema.py | 18 +-
.../ci/pre_commit/pre_commit_local_yml_mounts.py | 14 +-
.../pre_commit/pre_commit_migration_reference.py | 10 +-
scripts/ci/pre_commit/pre_commit_mypy.py | 10 +-
scripts/ci/pre_commit/pre_commit_newsfragments.py | 2 +-
.../pre_commit_replace_bad_characters.py | 12 +-
.../ci/pre_commit/pre_commit_sort_in_the_wild.py | 2 +-
.../pre_commit_sort_spelling_wordlist.py | 2 +-
.../ci/pre_commit/pre_commit_supported_versions.py | 2 +-
.../pre_commit_update_breeze_config_hash.py | 2 +-
.../ci/pre_commit/pre_commit_update_er_diagram.py | 14 +-
.../pre_commit_update_example_dags_paths.py | 6 +-
.../ci/pre_commit/pre_commit_update_versions.py | 12 +-
.../pre_commit_vendor_k8s_json_schema.py | 6 +-
.../ci/pre_commit/pre_commit_version_heads_map.py | 4 +-
scripts/ci/pre_commit/pre_commit_www_lint.py | 8 +-
scripts/ci/pre_commit/pre_commit_yaml_to_cfg.py | 6 +-
scripts/ci/runners/sync_authors.py | 14 +-
scripts/ci/testing/summarize_junit_failures.py | 36 +-
scripts/in_container/check_junitxml_result.py | 30 +-
scripts/in_container/run_migration_reference.py | 36 +-
scripts/in_container/run_prepare_er_diagram.py | 16 +-
scripts/in_container/run_resource_check.py | 10 +-
.../in_container/update_quarantined_test_status.py | 28 +-
scripts/in_container/verify_providers.py | 54 +-
scripts/tools/check_if_limited_dependencies.py | 2 +-
scripts/tools/generate-integrations-json.py | 30 +-
scripts/tools/initialize_virtualenv.py | 8 +-
setup.py | 434 +++++-----
tests/always/test_connection.py | 450 +++++------
tests/always/test_project_structure.py | 260 +++---
tests/always/test_secrets.py | 20 +-
tests/always/test_secrets_backends.py | 12 +-
tests/always/test_secrets_local_filesystem.py | 30 +-
tests/api/auth/backend/test_basic_auth.py | 4 +-
tests/api/auth/backend/test_kerberos_auth.py | 24 +-
tests/api/auth/test_client.py | 16 +-
tests/api/client/test_local_client.py | 36 +-
tests/api/common/experimental/test_pool.py | 34 +-
tests/api/common/test_delete_dag.py | 8 +-
tests/api/common/test_mark_tasks.py | 34 +-
tests/api/common/test_trigger_dag.py | 30 +-
tests/api/conftest.py | 2 +-
tests/api_connexion/conftest.py | 2 +-
.../endpoints/test_config_endpoint.py | 52 +-
.../endpoints/test_connection_endpoint.py | 242 +++---
tests/api_connexion/endpoints/test_dag_endpoint.py | 818 +++++++++----------
.../endpoints/test_dag_run_endpoint.py | 548 ++++++-------
.../endpoints/test_dag_source_endpoint.py | 16 +-
.../endpoints/test_dag_warning_endpoint.py | 54 +-
.../endpoints/test_dataset_endpoint.py | 100 +--
.../endpoints/test_event_log_endpoint.py | 56 +-
.../endpoints/test_extra_link_endpoint.py | 14 +-
.../endpoints/test_import_error_endpoint.py | 28 +-
tests/api_connexion/endpoints/test_log_endpoint.py | 122 +--
.../test_mapped_task_instance_endpoint.py | 80 +-
.../endpoints/test_plugin_endpoint.py | 48 +-
.../api_connexion/endpoints/test_pool_endpoint.py | 50 +-
.../endpoints/test_provider_endpoint.py | 50 +-
.../endpoints/test_role_and_permission_endpoint.py | 170 ++--
.../api_connexion/endpoints/test_task_endpoint.py | 74 +-
.../endpoints/test_task_instance_endpoint.py | 194 ++---
.../api_connexion/endpoints/test_user_endpoint.py | 188 ++---
.../endpoints/test_variable_endpoint.py | 42 +-
.../endpoints/test_version_endpoint.py | 2 +-
.../api_connexion/endpoints/test_xcom_endpoint.py | 200 ++---
tests/api_connexion/schemas/test_common_schema.py | 2 +-
tests/api_connexion/schemas/test_config_schema.py | 26 +-
.../schemas/test_connection_schema.py | 138 ++--
tests/api_connexion/schemas/test_dag_run_schema.py | 6 +-
tests/api_connexion/schemas/test_dag_schema.py | 148 ++--
tests/api_connexion/schemas/test_dataset_schema.py | 18 +-
.../api_connexion/schemas/test_event_log_schema.py | 8 +-
tests/api_connexion/schemas/test_plugin_schema.py | 64 +-
.../schemas/test_role_and_permission_schema.py | 28 +-
.../schemas/test_task_instance_schema.py | 16 +-
tests/api_connexion/schemas/test_task_schema.py | 14 +-
tests/api_connexion/schemas/test_user_schema.py | 70 +-
tests/api_connexion/schemas/test_version_schema.py | 2 +-
tests/api_connexion/schemas/test_xcom_schema.py | 122 +--
tests/api_connexion/test_auth.py | 12 +-
tests/api_connexion/test_cors.py | 26 +-
tests/api_connexion/test_error_handling.py | 8 +-
tests/api_connexion/test_parameters.py | 12 +-
tests/api_connexion/test_security.py | 2 +-
tests/callbacks/test_callback_requests.py | 10 +-
tests/charts/helm_template_generator.py | 16 +-
tests/charts/test_airflow_common.py | 36 +-
tests/charts/test_basic_helm_chart.py | 236 +++---
tests/charts/test_cleanup_pods.py | 6 +-
tests/charts/test_configmap.py | 10 +-
tests/charts/test_create_user_job.py | 6 +-
tests/charts/test_dag_processor.py | 12 +-
tests/charts/test_extra_configmaps_secrets.py | 4 +-
tests/charts/test_flower.py | 18 +-
tests/charts/test_git_sync_scheduler.py | 6 +-
tests/charts/test_git_sync_webserver.py | 4 +-
tests/charts/test_git_sync_worker.py | 4 +-
tests/charts/test_ingress_flower.py | 2 +-
tests/charts/test_ingress_web.py | 2 +-
tests/charts/test_keda.py | 28 +-
tests/charts/test_kerberos.py | 2 +-
tests/charts/test_pdb_pgbouncer.py | 2 +-
tests/charts/test_pdb_scheduler.py | 2 +-
tests/charts/test_pdb_webserver.py | 2 +-
tests/charts/test_pgbouncer.py | 8 +-
tests/charts/test_pod_launcher_role.py | 8 +-
tests/charts/test_pod_template_file.py | 12 +-
tests/charts/test_rbac.py | 120 +--
tests/charts/test_rbac_pod_log_reader.py | 16 +-
tests/charts/test_redis.py | 6 +-
tests/charts/test_scheduler.py | 14 +-
tests/charts/test_statsd.py | 8 +-
tests/charts/test_triggerer.py | 12 +-
tests/charts/test_webserver.py | 12 +-
tests/charts/test_worker.py | 12 +-
tests/cli/commands/test_celery_command.py | 242 +++---
tests/cli/commands/test_cheat_sheet_command.py | 32 +-
tests/cli/commands/test_config_command.py | 18 +-
tests/cli/commands/test_connection_command.py | 98 +--
tests/cli/commands/test_dag_command.py | 254 +++---
tests/cli/commands/test_dag_processor_command.py | 8 +-
tests/cli/commands/test_db_command.py | 272 +++----
tests/cli/commands/test_info_command.py | 48 +-
tests/cli/commands/test_jobs_command.py | 26 +-
tests/cli/commands/test_kerberos_command.py | 64 +-
tests/cli/commands/test_kubernetes_command.py | 152 ++--
tests/cli/commands/test_legacy_commands.py | 4 +-
tests/cli/commands/test_plugins_command.py | 60 +-
tests/cli/commands/test_pool_command.py | 42 +-
tests/cli/commands/test_role_command.py | 110 +--
.../cli/commands/test_rotate_fernet_key_command.py | 44 +-
tests/cli/commands/test_scheduler_command.py | 18 +-
tests/cli/commands/test_sync_perm_command.py | 4 +-
tests/cli/commands/test_task_command.py | 222 ++---
tests/cli/commands/test_triggerer_command.py | 2 +-
tests/cli/commands/test_user_command.py | 266 +++---
tests/cli/commands/test_variable_command.py | 78 +-
tests/cli/commands/test_version_command.py | 2 +-
tests/cli/commands/test_webserver_command.py | 98 +--
tests/cli/conftest.py | 10 +-
tests/cli/test_cli_parser.py | 30 +-
tests/cluster_policies/__init__.py | 10 +-
tests/conftest.py | 52 +-
tests/core/test_config_templates.py | 68 +-
tests/core/test_configuration.py | 692 ++++++++--------
tests/core/test_core.py | 38 +-
tests/core/test_impersonation_tests.py | 44 +-
tests/core/test_logging_config.py | 82 +-
tests/core/test_providers_manager.py | 132 +--
tests/core/test_sentry.py | 14 +-
tests/core/test_settings.py | 12 +-
tests/core/test_sqlalchemy_config.py | 52 +-
tests/core/test_stats.py | 102 +--
tests/core/test_templates.py | 20 +-
tests/dag_processing/test_manager.py | 168 ++--
tests/dag_processing/test_processor.py | 176 ++--
tests/dags/subdir2/test_dont_ignore_this.py | 4 +-
tests/dags/test_clear_subdag.py | 2 +-
tests/dags/test_cli_triggered_dags.py | 10 +-
tests/dags/test_dag_with_no_tags.py | 2 +-
tests/dags/test_dagrun_fast_follow.py | 32 +-
tests/dags/test_dags_folder.py | 2 +-
tests/dags/test_datasets.py | 32 +-
tests/dags/test_default_impersonation.py | 10 +-
tests/dags/test_default_views.py | 14 +-
tests/dags/test_double_trigger.py | 8 +-
tests/dags/test_example_bash_operator.py | 16 +-
tests/dags/test_heartbeat_failed_fast.py | 8 +-
tests/dags/test_impersonation.py | 10 +-
tests/dags/test_impersonation_subdag.py | 14 +-
tests/dags/test_invalid_cron.py | 4 +-
tests/dags/test_issue_1225.py | 46 +-
tests/dags/test_latest_runs.py | 4 +-
tests/dags/test_logging_in_dag.py | 4 +-
tests/dags/test_mapped_classic.py | 8 +-
tests/dags/test_mapped_taskflow.py | 4 +-
tests/dags/test_mark_state.py | 18 +-
tests/dags/test_miscellaneous.py | 18 +-
tests/dags/test_multiple_dags.py | 8 +-
tests/dags/test_no_impersonation.py | 8 +-
tests/dags/test_on_failure_callback.py | 16 +-
tests/dags/test_on_kill.py | 6 +-
tests/dags/test_only_empty_tasks.py | 2 +-
tests/dags/test_parsing_context.py | 4 +-
tests/dags/test_retry_handling_job.py | 20 +-
tests/dags/test_scheduler_dags.py | 10 +-
tests/dags/test_sensor.py | 4 +-
tests/dags/test_subdag.py | 20 +-
tests/dags/test_task_view_type_check.py | 10 +-
tests/dags_corrupted/test_impersonation_custom.py | 10 +-
tests/dags_corrupted/test_nonstring_owner.py | 2 +-
tests/dags_with_system_exit/a_system_exit.py | 2 +-
.../dags_with_system_exit/b_test_scheduler_dags.py | 4 +-
tests/dags_with_system_exit/c_system_exit.py | 2 +-
tests/decorators/test_external_python.py | 8 +-
tests/decorators/test_python.py | 84 +-
tests/decorators/test_python_virtualenv.py | 18 +-
tests/decorators/test_task_group.py | 22 +-
tests/executors/test_base_executor.py | 12 +-
tests/executors/test_celery_executor.py | 124 +--
tests/executors/test_celery_kubernetes_executor.py | 32 +-
tests/executors/test_dask_executor.py | 68 +-
tests/executors/test_kubernetes_executor.py | 366 ++++-----
tests/executors/test_local_executor.py | 44 +-
tests/executors/test_local_kubernetes_executor.py | 10 +-
tests/executors/test_sequential_executor.py | 12 +-
tests/hooks/test_subprocess.py | 36 +-
tests/jobs/test_backfill_job.py | 292 +++----
tests/jobs/test_base_job.py | 16 +-
tests/jobs/test_local_task_job.py | 158 ++--
tests/jobs/test_scheduler_job.py | 898 ++++++++++-----------
tests/jobs/test_triggerer_job.py | 38 +-
tests/kubernetes/models/test_secret.py | 116 +--
tests/kubernetes/test_client.py | 14 +-
tests/kubernetes/test_pod_generator.py | 422 +++++-----
tests/lineage/test_lineage.py | 28 +-
tests/macros/test_hive.py | 12 +-
tests/models/__init__.py | 2 +-
tests/models/test_base.py | 6 +-
tests/models/test_baseoperator.py | 88 +-
tests/models/test_cleartasks.py | 54 +-
tests/models/test_dag.py | 632 +++++++--------
tests/models/test_dagbag.py | 178 ++--
tests/models/test_dagcode.py | 20 +-
tests/models/test_dagrun.py | 394 ++++-----
tests/models/test_mappedoperator.py | 74 +-
tests/models/test_param.py | 130 +--
tests/models/test_pool.py | 46 +-
tests/models/test_renderedtifields.py | 30 +-
tests/models/test_serialized_dag.py | 10 +-
tests/models/test_skipmixin.py | 32 +-
tests/models/test_taskinstance.py | 786 +++++++++---------
tests/models/test_timestamp.py | 16 +-
tests/models/test_variable.py | 58 +-
tests/models/test_xcom.py | 12 +-
tests/operators/test_bash.py | 76 +-
tests/operators/test_branch_operator.py | 40 +-
tests/operators/test_datetime.py | 74 +-
tests/operators/test_email.py | 20 +-
tests/operators/test_generic_transfer.py | 50 +-
tests/operators/test_latest_only_operator.py | 72 +-
tests/operators/test_python.py | 200 ++---
tests/operators/test_subdag_operator.py | 132 +--
tests/operators/test_trigger_dagrun.py | 8 +-
tests/operators/test_weekday.py | 26 +-
tests/plugins/test_plugin.py | 16 +-
tests/plugins/test_plugin_ignore.py | 36 +-
tests/plugins/test_plugins_manager.py | 96 +--
tests/providers/airbyte/hooks/test_airbyte.py | 36 +-
tests/providers/airbyte/operators/test_airbyte.py | 12 +-
tests/providers/airbyte/sensors/test_airbyte.py | 18 +-
tests/providers/alibaba/cloud/hooks/test_oss.py | 56 +-
.../alibaba/cloud/log/test_oss_task_handler.py | 78 +-
.../alibaba/cloud/sensors/test_oss_key.py | 2 +-
tests/providers/alibaba/cloud/utils/oss_mock.py | 12 +-
tests/providers/amazon/aws/hooks/conftest.py | 2 +-
tests/providers/amazon/aws/hooks/test_appflow.py | 26 +-
tests/providers/amazon/aws/hooks/test_athena.py | 138 ++--
tests/providers/amazon/aws/hooks/test_base_aws.py | 210 ++---
.../amazon/aws/hooks/test_batch_client.py | 12 +-
.../amazon/aws/hooks/test_batch_waiters.py | 2 +-
.../amazon/aws/hooks/test_cloud_formation.py | 36 +-
tests/providers/amazon/aws/hooks/test_dms_task.py | 142 ++--
tests/providers/amazon/aws/hooks/test_dynamodb.py | 18 +-
tests/providers/amazon/aws/hooks/test_ec2.py | 20 +-
tests/providers/amazon/aws/hooks/test_ecs.py | 78 +-
tests/providers/amazon/aws/hooks/test_eks.py | 120 +--
.../hooks/test_elasticache_replication_group.py | 18 +-
tests/providers/amazon/aws/hooks/test_emr.py | 32 +-
.../amazon/aws/hooks/test_emr_containers.py | 40 +-
.../amazon/aws/hooks/test_emr_serverless.py | 66 +-
tests/providers/amazon/aws/hooks/test_glacier.py | 12 +-
tests/providers/amazon/aws/hooks/test_glue.py | 42 +-
.../amazon/aws/hooks/test_glue_catalog.py | 76 +-
.../amazon/aws/hooks/test_glue_crawler.py | 116 +--
tests/providers/amazon/aws/hooks/test_kinesis.py | 24 +-
.../amazon/aws/hooks/test_lambda_function.py | 6 +-
tests/providers/amazon/aws/hooks/test_logs.py | 18 +-
tests/providers/amazon/aws/hooks/test_rds.py | 6 +-
.../amazon/aws/hooks/test_redshift_cluster.py | 66 +-
.../amazon/aws/hooks/test_redshift_data.py | 6 +-
.../amazon/aws/hooks/test_redshift_sql.py | 24 +-
tests/providers/amazon/aws/hooks/test_s3.py | 420 +++++-----
tests/providers/amazon/aws/hooks/test_sagemaker.py | 466 +++++------
.../amazon/aws/hooks/test_secrets_manager.py | 34 +-
tests/providers/amazon/aws/hooks/test_ses.py | 20 +-
tests/providers/amazon/aws/hooks/test_sns.py | 36 +-
tests/providers/amazon/aws/hooks/test_sqs.py | 4 +-
.../amazon/aws/hooks/test_step_function.py | 20 +-
.../amazon/aws/log/test_cloudwatch_task_handler.py | 86 +-
.../amazon/aws/log/test_s3_task_handler.py | 82 +-
.../providers/amazon/aws/operators/test_appflow.py | 46 +-
.../providers/amazon/aws/operators/test_athena.py | 126 +--
tests/providers/amazon/aws/operators/test_batch.py | 12 +-
.../amazon/aws/operators/test_cloud_formation.py | 18 +-
.../amazon/aws/operators/test_datasync.py | 8 +-
.../amazon/aws/operators/test_dms_create_task.py | 66 +-
.../amazon/aws/operators/test_dms_delete_task.py | 28 +-
.../aws/operators/test_dms_describe_tasks.py | 52 +-
.../amazon/aws/operators/test_dms_start_task.py | 32 +-
.../amazon/aws/operators/test_dms_stop_task.py | 28 +-
tests/providers/amazon/aws/operators/test_ecs.py | 470 +++++------
tests/providers/amazon/aws/operators/test_eks.py | 18 +-
.../amazon/aws/operators/test_emr_add_steps.py | 98 +--
.../amazon/aws/operators/test_emr_containers.py | 86 +-
.../aws/operators/test_emr_create_job_flow.py | 88 +-
.../aws/operators/test_emr_modify_cluster.py | 22 +-
.../amazon/aws/operators/test_emr_serverless.py | 76 +-
.../aws/operators/test_emr_terminate_job_flow.py | 6 +-
.../amazon/aws/operators/test_example_s3_bucket.py | 2 +-
.../providers/amazon/aws/operators/test_glacier.py | 4 +-
tests/providers/amazon/aws/operators/test_glue.py | 74 +-
.../amazon/aws/operators/test_glue_crawler.py | 76 +-
.../providers/amazon/aws/operators/test_lambda.py | 4 +-
tests/providers/amazon/aws/operators/test_rds.py | 246 +++---
.../amazon/aws/operators/test_redshift_cluster.py | 32 +-
.../amazon/aws/operators/test_redshift_data.py | 10 +-
.../amazon/aws/operators/test_redshift_sql.py | 4 +-
.../amazon/aws/operators/test_s3_bucket_tagging.py | 2 +-
.../amazon/aws/operators/test_s3_file_transform.py | 22 +-
.../providers/amazon/aws/operators/test_s3_list.py | 10 +-
.../amazon/aws/operators/test_s3_list_prefixes.py | 12 +-
.../amazon/aws/operators/test_s3_object.py | 82 +-
.../amazon/aws/operators/test_sagemaker_base.py | 10 +-
.../aws/operators/test_sagemaker_endpoint.py | 94 +--
.../operators/test_sagemaker_endpoint_config.py | 48 +-
.../amazon/aws/operators/test_sagemaker_model.py | 44 +-
.../aws/operators/test_sagemaker_processing.py | 150 ++--
.../aws/operators/test_sagemaker_training.py | 96 +--
.../aws/operators/test_sagemaker_transform.py | 106 +--
.../amazon/aws/operators/test_sagemaker_tuning.py | 86 +-
tests/providers/amazon/aws/operators/test_sns.py | 4 +-
tests/providers/amazon/aws/operators/test_sqs.py | 48 +-
.../amazon/aws/operators/test_step_function.py | 28 +-
.../amazon/aws/secrets/test_secrets_manager.py | 120 +--
.../amazon/aws/secrets/test_systems_manager.py | 58 +-
tests/providers/amazon/aws/sensors/test_athena.py | 20 +-
tests/providers/amazon/aws/sensors/test_batch.py | 96 +--
.../amazon/aws/sensors/test_cloud_formation.py | 46 +-
.../providers/amazon/aws/sensors/test_dms_task.py | 28 +-
tests/providers/amazon/aws/sensors/test_eks.py | 38 +-
.../providers/amazon/aws/sensors/test_emr_base.py | 40 +-
.../amazon/aws/sensors/test_emr_containers.py | 28 +-
.../amazon/aws/sensors/test_emr_job_flow.py | 270 +++----
.../providers/amazon/aws/sensors/test_emr_step.py | 176 ++--
tests/providers/amazon/aws/sensors/test_glacier.py | 6 +-
tests/providers/amazon/aws/sensors/test_glue.py | 76 +-
.../aws/sensors/test_glue_catalog_partition.py | 36 +-
.../amazon/aws/sensors/test_glue_crawler.py | 26 +-
tests/providers/amazon/aws/sensors/test_rds.py | 100 +--
.../amazon/aws/sensors/test_redshift_cluster.py | 44 +-
tests/providers/amazon/aws/sensors/test_s3_key.py | 76 +-
.../amazon/aws/sensors/test_s3_keys_unchanged.py | 38 +-
.../amazon/aws/sensors/test_sagemaker_base.py | 46 +-
.../amazon/aws/sensors/test_sagemaker_endpoint.py | 44 +-
.../amazon/aws/sensors/test_sagemaker_training.py | 64 +-
.../amazon/aws/sensors/test_sagemaker_transform.py | 44 +-
.../amazon/aws/sensors/test_sagemaker_tuning.py | 44 +-
tests/providers/amazon/aws/sensors/test_sqs.py | 134 +--
.../amazon/aws/sensors/test_step_function.py | 24 +-
.../amazon/aws/system/utils/test_helpers.py | 40 +-
.../amazon/aws/transfers/test_dynamodb_to_s3.py | 36 +-
.../amazon/aws/transfers/test_ftp_to_s3.py | 26 +-
.../amazon/aws/transfers/test_gcs_to_s3.py | 64 +-
.../amazon/aws/transfers/test_google_api_to_s3.py | 146 ++--
.../amazon/aws/transfers/test_hive_to_dynamodb.py | 54 +-
.../aws/transfers/test_imap_attachment_to_s3.py | 32 +-
.../amazon/aws/transfers/test_local_to_s3.py | 46 +-
.../amazon/aws/transfers/test_mongo_to_s3.py | 44 +-
.../amazon/aws/transfers/test_redshift_to_s3.py | 32 +-
.../amazon/aws/transfers/test_s3_to_ftp.py | 12 +-
.../amazon/aws/transfers/test_s3_to_redshift.py | 48 +-
.../amazon/aws/transfers/test_s3_to_sftp.py | 36 +-
.../amazon/aws/transfers/test_salesforce_to_s3.py | 22 +-
.../amazon/aws/transfers/test_sftp_to_s3.py | 32 +-
.../amazon/aws/transfers/test_sql_to_s3.py | 16 +-
tests/providers/amazon/aws/utils/eks_test_utils.py | 38 +-
.../amazon/aws/utils/test_connection_wrapper.py | 4 +-
.../amazon/aws/utils/test_eks_get_token.py | 54 +-
tests/providers/apache/beam/hooks/test_beam.py | 118 +--
tests/providers/apache/beam/operators/test_beam.py | 194 ++---
.../apache/cassandra/hooks/test_cassandra.py | 64 +-
.../apache/cassandra/sensors/test_record.py | 14 +-
.../apache/cassandra/sensors/test_table.py | 14 +-
tests/providers/apache/drill/hooks/test_drill.py | 28 +-
.../providers/apache/drill/operators/test_drill.py | 10 +-
tests/providers/apache/druid/hooks/test_druid.py | 92 +--
.../providers/apache/druid/operators/test_druid.py | 12 +-
.../apache/druid/transfers/test_hive_to_druid.py | 64 +-
tests/providers/apache/hdfs/hooks/test_hdfs.py | 46 +-
tests/providers/apache/hdfs/hooks/test_webhdfs.py | 140 ++--
tests/providers/apache/hdfs/sensors/test_hdfs.py | 108 +--
.../providers/apache/hdfs/sensors/test_web_hdfs.py | 12 +-
tests/providers/apache/hive/__init__.py | 22 +-
tests/providers/apache/hive/hooks/test_hive.py | 506 ++++++------
tests/providers/apache/hive/operators/test_hive.py | 264 +++---
.../apache/hive/operators/test_hive_stats.py | 194 ++---
tests/providers/apache/hive/sensors/test_hdfs.py | 6 +-
.../apache/hive/sensors/test_hive_partition.py | 6 +-
.../hive/sensors/test_metastore_partition.py | 12 +-
.../hive/sensors/test_named_hive_partition.py | 46 +-
.../apache/hive/transfers/test_hive_to_mysql.py | 106 +--
.../apache/hive/transfers/test_hive_to_samba.py | 46 +-
.../apache/hive/transfers/test_mssql_to_hive.py | 36 +-
.../apache/hive/transfers/test_mysql_to_hive.py | 118 +--
.../apache/hive/transfers/test_s3_to_hive.py | 176 ++--
.../apache/hive/transfers/test_vertica_to_hive.py | 24 +-
tests/providers/apache/kylin/hooks/test_kylin.py | 44 +-
.../apache/kylin/operators/test_kylin_cube.py | 126 +--
tests/providers/apache/livy/hooks/test_livy.py | 318 ++++----
tests/providers/apache/livy/operators/test_livy.py | 68 +-
tests/providers/apache/livy/sensors/test_livy.py | 10 +-
tests/providers/apache/pig/hooks/test_pig.py | 18 +-
tests/providers/apache/pig/operators/test_pig.py | 8 +-
tests/providers/apache/pinot/hooks/test_pinot.py | 124 +--
.../apache/spark/hooks/test_spark_jdbc.py | 152 ++--
.../apache/spark/hooks/test_spark_jdbc_script.py | 124 +--
.../providers/apache/spark/hooks/test_spark_sql.py | 158 ++--
.../apache/spark/hooks/test_spark_submit.py | 522 ++++++------
.../apache/spark/operators/test_spark_jdbc.py | 170 ++--
.../apache/spark/operators/test_spark_sql.py | 60 +-
.../apache/spark/operators/test_spark_submit.py | 214 ++---
tests/providers/apache/sqoop/hooks/test_sqoop.py | 330 ++++----
.../providers/apache/sqoop/operators/test_sqoop.py | 194 ++---
tests/providers/arangodb/hooks/test_arangodb.py | 20 +-
.../providers/arangodb/operators/test_arangodb.py | 6 +-
tests/providers/arangodb/sensors/test_arangodb.py | 20 +-
tests/providers/asana/hooks/test_asana.py | 34 +-
tests/providers/atlassian/jira/hooks/test_jira.py | 6 +-
.../atlassian/jira/operators/test_jira.py | 20 +-
.../providers/atlassian/jira/sensors/test_jira.py | 20 +-
.../providers/celery/sensors/test_celery_queue.py | 24 +-
tests/providers/cloudant/hooks/test_cloudant.py | 10 +-
.../cncf/kubernetes/hooks/test_kubernetes.py | 136 ++--
.../kubernetes/operators/test_kubernetes_pod.py | 136 ++--
.../kubernetes/operators/test_spark_kubernetes.py | 184 ++---
.../kubernetes/sensors/test_spark_kubernetes.py | 12 +-
.../cncf/kubernetes/utils/test_pod_manager.py | 128 +--
tests/providers/common/sql/hooks/test_dbapi.py | 58 +-
tests/providers/common/sql/hooks/test_sqlparse.py | 14 +-
tests/providers/common/sql/operators/test_sql.py | 54 +-
tests/providers/common/sql/sensors/test_sql.py | 88 +-
.../providers/databricks/hooks/test_databricks.py | 506 ++++++------
.../databricks/hooks/test_databricks_sql.py | 18 +-
.../databricks/operators/test_databricks.py | 398 ++++-----
.../databricks/operators/test_databricks_repos.py | 64 +-
.../databricks/operators/test_databricks_sql.py | 94 +--
.../databricks/triggers/test_databricks.py | 88 +-
tests/providers/databricks/utils/databricks.py | 32 +-
tests/providers/datadog/hooks/test_datadog.py | 66 +-
tests/providers/datadog/sensors/test_datadog.py | 82 +-
tests/providers/dingding/hooks/test_dingding.py | 160 ++--
.../providers/dingding/operators/test_dingding.py | 38 +-
.../discord/hooks/test_discord_webhook.py | 42 +-
.../discord/operators/test_discord_webhook.py | 36 +-
tests/providers/docker/decorators/test_docker.py | 2 +-
tests/providers/docker/hooks/test_docker.py | 84 +-
tests/providers/docker/operators/test_docker.py | 400 ++++-----
.../docker/operators/test_docker_swarm.py | 98 +--
.../elasticsearch/hooks/test_elasticsearch.py | 32 +-
.../elasticsearch/log/elasticmock/__init__.py | 2 +-
.../log/elasticmock/fake_elasticsearch.py | 324 ++++----
.../log/elasticmock/utilities/__init__.py | 2 +-
.../elasticsearch/log/test_es_task_handler.py | 206 ++---
tests/providers/exasol/hooks/test_exasol.py | 52 +-
tests/providers/exasol/operators/test_exasol.py | 24 +-
tests/providers/ftp/hooks/test_ftp.py | 40 +-
tests/providers/ftp/sensors/test_ftp.py | 8 +-
tests/providers/github/hooks/test_github.py | 6 +-
tests/providers/github/operators/test_github.py | 16 +-
tests/providers/github/sensors/test_github.py | 16 +-
tests/providers/google/ads/hooks/test_ads.py | 6 +-
.../_internal_client/test_secret_manager_client.py | 28 +-
.../providers/google/cloud/hooks/test_bigquery.py | 616 +++++++-------
.../google/cloud/hooks/test_bigquery_system.py | 14 +-
.../providers/google/cloud/hooks/test_bigtable.py | 206 ++---
.../google/cloud/hooks/test_cloud_build.py | 60 +-
.../google/cloud/hooks/test_cloud_composer.py | 12 +-
.../google/cloud/hooks/test_cloud_memorystore.py | 36 +-
.../providers/google/cloud/hooks/test_cloud_sql.py | 530 ++++++------
.../hooks/test_cloud_storage_transfer_service.py | 266 +++---
tests/providers/google/cloud/hooks/test_compute.py | 252 +++---
.../google/cloud/hooks/test_compute_ssh_system.py | 30 +-
.../providers/google/cloud/hooks/test_dataflow.py | 380 ++++-----
.../providers/google/cloud/hooks/test_dataplex.py | 8 +-
.../providers/google/cloud/hooks/test_dataproc.py | 28 +-
.../providers/google/cloud/hooks/test_datastore.py | 134 +--
tests/providers/google/cloud/hooks/test_dlp.py | 88 +-
.../providers/google/cloud/hooks/test_functions.py | 118 +--
tests/providers/google/cloud/hooks/test_gcs.py | 356 ++++----
tests/providers/google/cloud/hooks/test_gdm.py | 20 +-
.../google/cloud/hooks/test_kms_system.py | 4 +-
.../google/cloud/hooks/test_kubernetes_engine.py | 46 +-
.../google/cloud/hooks/test_life_sciences.py | 28 +-
tests/providers/google/cloud/hooks/test_looker.py | 6 +-
.../providers/google/cloud/hooks/test_mlengine.py | 380 ++++-----
tests/providers/google/cloud/hooks/test_pubsub.py | 156 ++--
.../google/cloud/hooks/test_secret_manager.py | 24 +-
.../cloud/hooks/test_secret_manager_system.py | 4 +-
tests/providers/google/cloud/hooks/test_spanner.py | 290 +++----
.../google/cloud/hooks/test_stackdriver.py | 86 +-
tests/providers/google/cloud/hooks/test_tasks.py | 26 +-
.../providers/google/cloud/hooks/test_translate.py | 40 +-
.../google/cloud/hooks/test_video_intelligence.py | 2 +-
tests/providers/google/cloud/hooks/test_vision.py | 106 +--
.../google/cloud/log/test_gcs_task_handler.py | 2 +-
.../cloud/log/test_gcs_task_handler_system.py | 12 +-
.../cloud/log/test_stackdriver_task_handler.py | 168 ++--
.../log/test_stackdriver_task_handler_system.py | 10 +-
.../google/cloud/operators/test_automl_system.py | 4 +-
.../google/cloud/operators/test_bigquery.py | 238 +++---
.../google/cloud/operators/test_bigquery_dts.py | 12 +-
.../cloud/operators/test_bigquery_dts_system.py | 2 +-
.../google/cloud/operators/test_bigtable.py | 152 ++--
.../google/cloud/operators/test_bigtable_system.py | 16 +-
.../google/cloud/operators/test_cloud_build.py | 8 +-
.../cloud/operators/test_cloud_composer_system.py | 2 +-
.../google/cloud/operators/test_cloud_sql.py | 64 +-
.../test_cloud_storage_transfer_service.py | 256 +++---
.../google/cloud/operators/test_compute.py | 324 ++++----
.../google/cloud/operators/test_compute_system.py | 2 +-
.../cloud/operators/test_compute_system_helper.py | 184 ++---
.../google/cloud/operators/test_datacatalog.py | 34 +-
.../google/cloud/operators/test_dataflow.py | 202 ++---
.../google/cloud/operators/test_dataflow_system.py | 52 +-
.../cloud/operators/test_datafusion_system.py | 2 +-
.../google/cloud/operators/test_dataprep_system.py | 2 +-
.../google/cloud/operators/test_dataproc.py | 114 +--
.../google/cloud/operators/test_datastore.py | 8 +-
.../cloud/operators/test_datastore_system.py | 4 +-
.../google/cloud/operators/test_functions.py | 352 ++++----
tests/providers/google/cloud/operators/test_gcs.py | 16 +-
.../cloud/operators/test_kubernetes_engine.py | 216 ++---
.../google/cloud/operators/test_life_sciences.py | 12 +-
.../google/cloud/operators/test_looker.py | 4 +-
.../google/cloud/operators/test_mlengine.py | 482 +++++------
.../google/cloud/operators/test_mlengine_system.py | 2 +-
.../google/cloud/operators/test_mlengine_utils.py | 146 ++--
.../google/cloud/operators/test_pubsub.py | 42 +-
.../google/cloud/operators/test_spanner.py | 30 +-
.../google/cloud/operators/test_stackdriver.py | 78 +-
.../providers/google/cloud/operators/test_tasks.py | 62 +-
.../google/cloud/operators/test_translate.py | 38 +-
.../cloud/operators/test_translate_speech.py | 52 +-
.../google/cloud/operators/test_vertex_ai.py | 68 +-
.../google/cloud/operators/test_vision.py | 114 +--
.../google/cloud/operators/test_vision_system.py | 18 +-
.../google/cloud/operators/test_workflows.py | 8 +-
.../google/cloud/secrets/test_secret_manager.py | 34 +-
.../cloud/secrets/test_secret_manager_system.py | 8 +-
.../google/cloud/sensors/test_bigquery.py | 10 +-
.../google/cloud/sensors/test_bigtable.py | 24 +-
.../sensors/test_cloud_storage_transfer_service.py | 66 +-
.../google/cloud/sensors/test_dataflow.py | 2 +-
.../google/cloud/sensors/test_datafusion.py | 2 +-
tests/providers/google/cloud/sensors/test_gcs.py | 78 +-
.../providers/google/cloud/sensors/test_looker.py | 10 +-
.../providers/google/cloud/sensors/test_pubsub.py | 20 +-
tests/providers/google/cloud/sensors/test_tasks.py | 4 +-
.../google/cloud/transfers/test_adls_to_gcs.py | 42 +-
.../cloud/transfers/test_azure_fileshare_to_gcs.py | 32 +-
.../cloud/transfers/test_bigquery_to_bigquery.py | 18 +-
.../google/cloud/transfers/test_bigquery_to_gcs.py | 46 +-
.../cloud/transfers/test_bigquery_to_mssql.py | 16 +-
.../cloud/transfers/test_bigquery_to_mysql.py | 14 +-
.../google/cloud/transfers/test_calendar_to_gcs.py | 16 +-
.../transfers/test_facebook_ads_to_gcs_system.py | 8 +-
.../google/cloud/transfers/test_gcs_to_bigquery.py | 36 +-
.../google/cloud/transfers/test_gcs_to_gcs.py | 196 ++---
.../google/cloud/transfers/test_gcs_to_local.py | 4 +-
.../google/cloud/transfers/test_local_to_gcs.py | 80 +-
.../google/cloud/transfers/test_mssql_to_gcs.py | 46 +-
.../google/cloud/transfers/test_mysql_to_gcs.py | 128 +--
.../google/cloud/transfers/test_oracle_to_gcs.py | 46 +-
.../google/cloud/transfers/test_postgres_to_gcs.py | 42 +-
.../cloud/transfers/test_postgres_to_gcs_system.py | 2 +-
.../google/cloud/transfers/test_s3_to_gcs.py | 38 +-
.../cloud/transfers/test_salesforce_to_gcs.py | 22 +-
.../transfers/test_salesforce_to_gcs_system.py | 6 +-
.../google/cloud/transfers/test_sql_to_gcs.py | 100 +--
.../providers/google/cloud/utils/base_gcp_mock.py | 12 +-
.../google/cloud/utils/gcp_authenticator.py | 100 +--
.../cloud/utils/test_credentials_provider.py | 86 +-
.../google/cloud/utils/test_field_validator.py | 74 +-
.../cloud/utils/test_mlengine_operator_utils.py | 22 +-
.../utils/test_mlengine_prediction_summary.py | 10 +-
.../common/auth_backend/test_google_openid.py | 2 +-
.../google/common/hooks/test_base_google.py | 266 +++---
.../google/common/hooks/test_discovery_api.py | 66 +-
.../google/firebase/hooks/test_firestore.py | 20 +-
.../providers/google/leveldb/hooks/test_leveldb.py | 86 +-
.../google/leveldb/operators/test_leveldb.py | 18 +-
.../hooks/test_campaign_manager.py | 4 +-
.../operators/test_display_video.py | 2 +-
.../operators/test_display_video_system.py | 8 +-
.../providers/google/suite/hooks/test_calendar.py | 28 +-
tests/providers/google/suite/hooks/test_sheets.py | 26 +-
.../google/suite/operators/test_sheets.py | 4 +-
tests/providers/google/test_go_module.py | 8 +-
tests/providers/grpc/hooks/test_grpc.py | 102 +--
tests/providers/grpc/operators/test_grpc.py | 16 +-
.../_internal_client/test_vault_client.py | 344 ++++----
tests/providers/hashicorp/hooks/test_vault.py | 298 +++----
tests/providers/hashicorp/secrets/test_vault.py | 210 ++---
tests/providers/http/hooks/test_http.py | 200 ++---
tests/providers/http/operators/test_http.py | 44 +-
tests/providers/http/sensors/test_http.py | 106 +--
tests/providers/imap/hooks/test_imap.py | 120 +--
.../providers/imap/sensors/test_imap_attachment.py | 18 +-
tests/providers/influxdb/hooks/test_influxdb.py | 14 +-
.../providers/influxdb/operators/test_influxdb.py | 6 +-
tests/providers/jdbc/hooks/test_jdbc.py | 38 +-
tests/providers/jdbc/operators/test_jdbc.py | 6 +-
tests/providers/jenkins/hooks/test_jenkins.py | 40 +-
.../jenkins/operators/test_jenkins_job_trigger.py | 134 +--
tests/providers/jenkins/sensors/test_jenkins.py | 10 +-
tests/providers/microsoft/azure/hooks/test_adx.py | 106 +--
tests/providers/microsoft/azure/hooks/test_asb.py | 44 +-
.../microsoft/azure/hooks/test_azure_batch.py | 24 +-
.../azure/hooks/test_azure_container_instance.py | 66 +-
.../azure/hooks/test_azure_container_registry.py | 18 +-
.../azure/hooks/test_azure_container_volume.py | 34 +-
.../microsoft/azure/hooks/test_azure_cosmos.py | 164 ++--
.../microsoft/azure/hooks/test_azure_data_lake.py | 80 +-
.../microsoft/azure/hooks/test_azure_fileshare.py | 180 ++---
.../microsoft/azure/hooks/test_base_azure.py | 30 +-
tests/providers/microsoft/azure/hooks/test_wasb.py | 236 +++---
.../microsoft/azure/log/test_wasb_task_handler.py | 50 +-
.../microsoft/azure/operators/test_adls_delete.py | 6 +-
.../microsoft/azure/operators/test_adls_list.py | 6 +-
.../microsoft/azure/operators/test_adx.py | 42 +-
.../microsoft/azure/operators/test_asb.py | 14 +-
.../microsoft/azure/operators/test_azure_batch.py | 16 +-
.../operators/test_azure_container_instances.py | 178 ++--
.../microsoft/azure/operators/test_azure_cosmos.py | 32 +-
.../azure/operators/test_azure_synapse.py | 22 +-
.../azure/operators/test_wasb_delete_blob.py | 22 +-
.../azure/secrets/test_azure_key_vault.py | 70 +-
.../microsoft/azure/sensors/test_azure_cosmos.py | 10 +-
.../providers/microsoft/azure/sensors/test_wasb.py | 64 +-
.../azure/transfers/test_local_to_adls.py | 16 +-
.../azure/transfers/test_local_to_wasb.py | 38 +-
.../transfers/test_oracle_to_azure_data_lake.py | 46 +-
.../microsoft/azure/transfers/test_sftp_to_wasb.py | 26 +-
.../providers/microsoft/mssql/hooks/test_mssql.py | 48 +-
.../microsoft/mssql/operators/test_mssql.py | 10 +-
tests/providers/microsoft/psrp/hooks/test_psrp.py | 42 +-
.../microsoft/psrp/operators/test_psrp.py | 18 +-
.../providers/microsoft/winrm/hooks/test_winrm.py | 62 +-
.../microsoft/winrm/operators/test_winrm.py | 6 +-
tests/providers/mongo/hooks/test_mongo.py | 194 ++---
tests/providers/mongo/sensors/test_mongo.py | 30 +-
tests/providers/mysql/hooks/test_mysql.py | 172 ++--
tests/providers/mysql/operators/test_mysql.py | 16 +-
.../mysql/transfers/test_presto_to_mysql.py | 34 +-
.../providers/mysql/transfers/test_s3_to_mysql.py | 64 +-
.../mysql/transfers/test_trino_to_mysql.py | 34 +-
.../mysql/transfers/test_vertica_to_mysql.py | 40 +-
tests/providers/neo4j/hooks/test_neo4j.py | 30 +-
tests/providers/neo4j/operators/test_neo4j.py | 8 +-
tests/providers/odbc/hooks/test_odbc.py | 132 +--
tests/providers/openfaas/hooks/test_openfaas.py | 28 +-
tests/providers/opsgenie/hooks/test_opsgenie.py | 96 +--
.../providers/opsgenie/operators/test_opsgenie.py | 158 ++--
tests/providers/oracle/hooks/test_oracle.py | 216 ++---
tests/providers/oracle/operators/test_oracle.py | 18 +-
.../oracle/transfers/test_oracle_to_oracle.py | 18 +-
.../papermill/operators/test_papermill.py | 8 +-
tests/providers/plexus/hooks/test_plexus.py | 28 +-
tests/providers/plexus/operators/test_job.py | 126 +--
tests/providers/postgres/hooks/test_postgres.py | 82 +-
.../providers/postgres/operators/test_postgres.py | 20 +-
tests/providers/presto/hooks/test_presto.py | 168 ++--
.../providers/presto/transfers/test_gcs_presto.py | 8 +-
tests/providers/qubole/hooks/test_qubole.py | 44 +-
tests/providers/qubole/hooks/test_qubole_check.py | 8 +-
tests/providers/qubole/operators/test_qubole.py | 70 +-
.../qubole/operators/test_qubole_check.py | 58 +-
tests/providers/qubole/sensors/test_qubole.py | 14 +-
tests/providers/redis/hooks/test_redis.py | 34 +-
.../redis/operators/test_redis_publish.py | 22 +-
tests/providers/redis/sensors/test_redis_key.py | 12 +-
.../providers/redis/sensors/test_redis_pub_sub.py | 48 +-
.../providers/salesforce/hooks/test_salesforce.py | 50 +-
tests/providers/salesforce/operators/test_bulk.py | 80 +-
.../operators/test_salesforce_apex_rest.py | 8 +-
tests/providers/samba/hooks/test_samba.py | 34 +-
tests/providers/segment/hooks/test_segment.py | 8 +-
.../segment/operators/test_segment_track_event.py | 16 +-
tests/providers/sendgrid/utils/test_emailer.py | 76 +-
tests/providers/sftp/hooks/test_sftp.py | 138 ++--
tests/providers/sftp/operators/test_sftp.py | 104 +--
tests/providers/sftp/sensors/test_sftp.py | 96 +--
.../singularity/operators/test_singularity.py | 58 +-
tests/providers/slack/hooks/test_slack.py | 76 +-
tests/providers/slack/hooks/test_slack_webhook.py | 42 +-
tests/providers/slack/operators/test_slack.py | 98 +--
.../slack/operators/test_slack_webhook.py | 24 +-
.../providers/slack/transfers/test_sql_to_slack.py | 156 ++--
tests/providers/snowflake/hooks/test_snowflake.py | 540 ++++++-------
.../snowflake/operators/test_snowflake.py | 18 +-
.../snowflake/transfers/test_s3_to_snowflake.py | 18 +-
.../snowflake/transfers/test_snowflake_to_slack.py | 106 +--
tests/providers/sqlite/hooks/test_sqlite.py | 50 +-
tests/providers/sqlite/operators/test_sqlite.py | 14 +-
tests/providers/ssh/hooks/test_ssh.py | 458 +++++------
tests/providers/ssh/operators/test_ssh.py | 32 +-
tests/providers/tableau/hooks/test_tableau.py | 164 ++--
tests/providers/tableau/operators/test_tableau.py | 48 +-
.../operators/test_tableau_refresh_workbook.py | 16 +-
tests/providers/tableau/sensors/test_tableau.py | 6 +-
tests/providers/tabular/hooks/test_tabular.py | 16 +-
tests/providers/telegram/hooks/test_telegram.py | 82 +-
.../providers/telegram/operators/test_telegram.py | 82 +-
tests/providers/trino/hooks/test_trino.py | 144 ++--
tests/providers/trino/operators/test_trino.py | 2 +-
tests/providers/trino/transfers/test_gcs_trino.py | 8 +-
tests/providers/vertica/hooks/test_vertica.py | 32 +-
tests/providers/vertica/operators/test_vertica.py | 4 +-
tests/providers/yandex/hooks/test_yandex.py | 50 +-
.../yandex/hooks/test_yandexcloud_dataproc.py | 136 ++--
.../yandex/operators/test_yandexcloud_dataproc.py | 324 ++++----
tests/providers/zendesk/hooks/test_zendesk.py | 32 +-
tests/security/test_kerberos.py | 232 +++---
tests/sensors/test_base.py | 60 +-
tests/sensors/test_bash.py | 12 +-
tests/sensors/test_external_task_sensor.py | 174 ++--
tests/sensors/test_filesystem.py | 54 +-
tests/sensors/test_python.py | 22 +-
tests/sensors/test_time_delta.py | 8 +-
tests/sensors/test_timeout_sensor.py | 14 +-
tests/sensors/test_weekday_sensor.py | 24 +-
tests/serialization/test_dag_serialization.py | 750 ++++++++---------
.../airbyte/example_airbyte_trigger_job.py | 10 +-
.../system/providers/alibaba/example_oss_bucket.py | 8 +-
.../system/providers/alibaba/example_oss_object.py | 24 +-
.../system/providers/amazon/aws/example_athena.py | 70 +-
tests/system/providers/amazon/aws/example_batch.py | 104 +--
.../providers/amazon/aws/example_cloudformation.py | 34 +-
.../providers/amazon/aws/example_datasync.py | 98 +--
tests/system/providers/amazon/aws/example_dms.py | 224 ++---
.../providers/amazon/aws/example_dynamodb_to_s3.py | 50 +-
tests/system/providers/amazon/aws/example_ec2.py | 38 +-
tests/system/providers/amazon/aws/example_ecs.py | 66 +-
.../providers/amazon/aws/example_ecs_fargate.py | 60 +-
.../aws/example_eks_with_fargate_in_one_step.py | 44 +-
.../amazon/aws/example_eks_with_fargate_profile.py | 42 +-
.../aws/example_eks_with_nodegroup_in_one_step.py | 36 +-
.../amazon/aws/example_eks_with_nodegroups.py | 42 +-
.../system/providers/amazon/aws/example_emr_eks.py | 80 +-
.../providers/amazon/aws/example_emr_serverless.py | 28 +-
tests/system/providers/amazon/aws/example_glue.py | 68 +-
.../amazon/aws/example_google_api_youtube_to_s3.py | 90 +--
.../system/providers/amazon/aws/example_lambda.py | 30 +-
.../providers/amazon/aws/example_local_to_s3.py | 24 +-
.../providers/amazon/aws/example_quicksight.py | 60 +-
.../providers/amazon/aws/example_rds_event.py | 24 +-
.../providers/amazon/aws/example_rds_export.py | 50 +-
.../providers/amazon/aws/example_rds_instance.py | 30 +-
.../providers/amazon/aws/example_rds_snapshot.py | 42 +-
.../providers/amazon/aws/example_redshift.py | 100 +--
.../amazon/aws/example_redshift_s3_transfers.py | 98 +--
tests/system/providers/amazon/aws/example_s3.py | 50 +-
.../providers/amazon/aws/example_sagemaker.py | 184 ++---
.../amazon/aws/example_sagemaker_endpoint.py | 186 ++---
tests/system/providers/amazon/aws/example_sns.py | 18 +-
tests/system/providers/amazon/aws/example_sqs.py | 24 +-
.../providers/amazon/aws/example_step_functions.py | 18 +-
.../system/providers/amazon/aws/utils/__init__.py | 58 +-
tests/system/providers/amazon/aws/utils/ec2.py | 60 +-
tests/system/providers/apache/beam/example_beam.py | 8 +-
.../apache/beam/example_beam_java_flink.py | 8 +-
.../apache/beam/example_beam_java_spark.py | 8 +-
tests/system/providers/apache/beam/example_go.py | 22 +-
.../providers/apache/beam/example_go_dataflow.py | 14 +-
.../providers/apache/beam/example_java_dataflow.py | 10 +-
.../system/providers/apache/beam/example_python.py | 44 +-
.../apache/beam/example_python_dataflow.py | 16 +-
tests/system/providers/apache/beam/utils.py | 36 +-
.../apache/cassandra/example_cassandra_dag.py | 4 +-
.../providers/apache/drill/example_drill_dag.py | 8 +-
.../providers/apache/druid/example_druid_dag.py | 4 +-
.../providers/apache/hive/example_twitter_dag.py | 10 +-
.../providers/apache/kylin/example_kylin_dag.py | 30 +-
tests/system/providers/apache/livy/example_livy.py | 14 +-
tests/system/providers/apache/pig/example_pig.py | 4 +-
.../providers/apache/spark/example_spark_dag.py | 6 +-
.../cncf/kubernetes/example_kubernetes.py | 38 +-
.../cncf/kubernetes/example_spark_kubernetes.py | 8 +-
.../providers/databricks/example_databricks.py | 26 +-
.../databricks/example_databricks_repos.py | 30 +-
.../providers/databricks/example_databricks_sql.py | 18 +-
.../system/providers/dingding/example_dingding.py | 118 +--
tests/system/providers/docker/example_docker.py | 18 +-
.../providers/docker/example_docker_copy_data.py | 2 +-
.../providers/docker/example_docker_swarm.py | 16 +-
.../example_taskflow_api_docker_virtualenv.py | 6 +-
.../elasticsearch/example_elasticsearch_query.py | 10 +-
tests/system/providers/github/example_github.py | 18 +-
tests/system/providers/google/ads/example_ads.py | 2 +-
.../automl/example_automl_nl_text_extraction.py | 6 +-
.../automl/example_automl_vision_classification.py | 2 +-
.../cloud/azure/example_azure_fileshare_to_gcs.py | 24 +-
.../cloud/bigquery/example_bigquery_queries.py | 6 +-
.../bigquery/example_bigquery_queries_async.py | 2 +-
.../cloud/bigquery/example_bigquery_to_mssql.py | 2 +-
.../cloud/bigquery/example_bigquery_transfer.py | 2 +-
.../cloud/cloud_build/example_cloud_build.py | 16 +-
.../cloud_build/example_cloud_build_trigger.py | 6 +-
.../cloud/cloud_functions/example_functions.py | 34 +-
.../example_cloud_memorystore_memcached.py | 4 +-
.../example_cloud_memorystore_redis.py | 4 +-
.../google/cloud/cloud_sql/example_cloud_sql.py | 32 +-
.../cloud/compute_igm/example_compute_igm.py | 30 +-
.../example_dlp_deidentify_content.py | 2 +-
.../data_loss_prevention/example_dlp_info_types.py | 2 +-
.../example_dlp_inspect_template.py | 2 +-
.../cloud/data_loss_prevention/example_dlp_job.py | 2 +-
.../example_dlp_job_trigger.py | 2 +-
.../google/cloud/dataform/example_dataform.py | 12 +-
.../cloud/dataproc/example_dataproc_batch.py | 2 +-
.../dataproc/example_dataproc_batch_persistent.py | 2 +-
.../dataproc/example_dataproc_cluster_generator.py | 6 +-
.../google/cloud/dataproc/example_dataproc_gke.py | 6 +-
.../cloud/dataproc/example_dataproc_hadoop.py | 2 +-
.../google/cloud/dataproc/example_dataproc_hive.py | 2 +-
.../google/cloud/dataproc/example_dataproc_pig.py | 2 +-
.../cloud/dataproc/example_dataproc_pyspark.py | 2 +-
.../cloud/dataproc/example_dataproc_spark.py | 2 +-
.../cloud/dataproc/example_dataproc_spark_async.py | 4 +-
.../dataproc/example_dataproc_spark_deferrable.py | 2 +-
.../cloud/dataproc/example_dataproc_spark_sql.py | 2 +-
.../cloud/dataproc/example_dataproc_sparkr.py | 2 +-
.../cloud/dataproc/example_dataproc_update.py | 2 +-
.../cloud/dataproc/example_dataproc_workflow.py | 4 +-
.../google/cloud/dataproc/resources/hello_world.py | 2 +-
.../cloud/datastore/example_datastore_commit.py | 2 +-
.../datastore/example_datastore_export_import.py | 8 +-
.../cloud/datastore/example_datastore_query.py | 2 +-
.../cloud/datastore/example_datastore_rollback.py | 2 +-
.../google/cloud/gcs/example_calendar_to_gcs.py | 2 +-
.../google/cloud/gcs/example_firestore.py | 2 +-
.../providers/google/cloud/gcs/example_gcs_acl.py | 2 +-
.../google/cloud/gcs/example_gcs_copy_delete.py | 2 +-
.../google/cloud/gcs/example_gcs_sensor.py | 4 +-
.../google/cloud/gcs/example_gcs_to_bigquery.py | 20 +-
.../google/cloud/gcs/example_gcs_to_gcs.py | 4 +-
.../google/cloud/gcs/example_gcs_to_gdrive.py | 2 +-
.../google/cloud/gcs/example_gcs_to_sheets.py | 2 +-
.../google/cloud/gcs/example_gcs_transform.py | 2 +-
.../cloud/gcs/example_gcs_transform_timespan.py | 2 +-
.../cloud/gcs/example_gcs_upload_download.py | 2 +-
.../google/cloud/gcs/example_mssql_to_gcs.py | 12 +-
.../google/cloud/gcs/example_mysql_to_gcs.py | 8 +-
.../google/cloud/gcs/example_oracle_to_gcs.py | 8 +-
.../google/cloud/gcs/example_s3_to_gcs.py | 18 +-
.../providers/google/cloud/gcs/example_sheets.py | 2 +-
.../google/cloud/gcs/example_sheets_to_gcs.py | 2 +-
.../google/cloud/gcs/example_trino_to_gcs.py | 4 +-
.../google/cloud/gcs/resources/transform_script.py | 2 +-
.../cloud/gcs/resources/transform_timespan.py | 4 +-
.../kubernetes_engine/example_kubernetes_engine.py | 6 +-
.../cloud/life_sciences/example_life_sciences.py | 8 +-
.../natural_language/example_natural_language.py | 4 +-
.../google/cloud/pubsub/example_pubsub.py | 2 +-
.../google/cloud/spanner/example_spanner.py | 24 +-
.../cloud/sql_to_sheets/example_sql_to_sheets.py | 2 +-
.../cloud/stackdriver/example_stackdriver.py | 36 +-
.../providers/google/cloud/tasks/example_queue.py | 4 +-
.../providers/google/cloud/tasks/example_tasks.py | 4 +-
.../google/cloud/transfers/example_gcs_to_sftp.py | 4 +-
.../translate_speech/example_translate_speech.py | 12 +-
.../example_video_intelligence.py | 2 +-
.../google/cloud/workflows/example_workflows.py | 2 +-
.../datacatalog/example_datacatalog_entries.py | 2 +-
.../example_datacatalog_search_catalog.py | 4 +-
.../example_datacatalog_tag_templates.py | 2 +-
.../google/datacatalog/example_datacatalog_tags.py | 4 +-
.../providers/google/leveldb/example_leveldb.py | 14 +-
.../google/marketing_platform/example_analytics.py | 2 +-
.../marketing_platform/example_campaign_manager.py | 2 +-
.../marketing_platform/example_search_ads.py | 2 +-
tests/system/providers/http/example_http.py | 48 +-
.../system/providers/influxdb/example_influxdb.py | 4 +-
.../providers/influxdb/example_influxdb_query.py | 6 +-
.../system/providers/jdbc/example_jdbc_queries.py | 18 +-
.../jenkins/example_jenkins_job_trigger.py | 2 +-
.../microsoft/azure/example_adls_delete.py | 8 +-
.../azure/example_azure_container_instances.py | 16 +-
.../microsoft/azure/example_azure_cosmosdb.py | 14 +-
.../providers/microsoft/azure/example_fileshare.py | 2 +-
.../microsoft/azure/example_local_to_adls.py | 8 +-
.../microsoft/azure/example_local_to_wasb.py | 2 +-
.../providers/microsoft/mssql/example_mssql.py | 42 +-
.../providers/microsoft/winrm/example_winrm.py | 14 +-
tests/system/providers/mysql/example_mysql.py | 10 +-
tests/system/providers/neo4j/example_neo4j.py | 6 +-
.../providers/papermill/example_papermill.py | 4 +-
.../papermill/example_papermill_verify.py | 6 +-
tests/system/providers/plexus/example_plexus.py | 24 +-
.../system/providers/postgres/example_postgres.py | 2 +-
.../providers/presto/example_gcs_to_presto.py | 2 +-
tests/system/providers/qubole/example_qubole.py | 84 +-
.../providers/qubole/example_qubole_sensors.py | 6 +-
tests/system/providers/salesforce/example_bulk.py | 20 +-
.../salesforce/example_salesforce_apex_rest.py | 2 +-
.../providers/singularity/example_singularity.py | 14 +-
tests/system/providers/slack/example_slack.py | 4 +-
.../system/providers/slack/example_sql_to_slack.py | 6 +-
.../providers/snowflake/example_s3_to_snowflake.py | 14 +-
.../providers/snowflake/example_snowflake.py | 20 +-
.../snowflake/example_snowflake_to_slack.py | 10 +-
tests/system/providers/sqlite/example_sqlite.py | 22 +-
tests/system/providers/tableau/example_tableau.py | 28 +-
.../tableau/example_tableau_refresh_workbook.py | 26 +-
.../system/providers/telegram/example_telegram.py | 6 +-
.../system/providers/trino/example_gcs_to_trino.py | 2 +-
tests/system/providers/trino/example_trino.py | 4 +-
.../system/providers/yandex/example_yandexcloud.py | 34 +-
.../yandex/example_yandexcloud_dataproc.py | 116 +--
.../example_yandexcloud_dataproc_lightweight.py | 22 +-
tests/task/task_runner/test_base_task_runner.py | 6 +-
tests/task/task_runner/test_cgroup_task_runner.py | 2 +-
.../task/task_runner/test_standard_task_runner.py | 80 +-
tests/task/task_runner/test_task_runner.py | 8 +-
tests/test_utils/api_connexion_utils.py | 8 +-
tests/test_utils/asserts.py | 6 +-
tests/test_utils/azure_system_helpers.py | 10 +-
tests/test_utils/config.py | 2 +-
tests/test_utils/gcp_system_helpers.py | 4 +-
tests/test_utils/get_all_tests.py | 10 +-
tests/test_utils/hdfs_utils.py | 214 ++---
tests/test_utils/mock_operators.py | 34 +-
tests/test_utils/mock_security_manager.py | 2 +-
tests/test_utils/perf/dags/elastic_dag.py | 8 +-
tests/test_utils/perf/dags/perf_dag_1.py | 16 +-
tests/test_utils/perf/dags/perf_dag_2.py | 16 +-
.../perf/scheduler_dag_execution_timing.py | 46 +-
tests/test_utils/perf/scheduler_ops_metrics.py | 40 +-
tests/test_utils/providers.py | 6 +-
tests/test_utils/salesforce_system_helpers.py | 4 +-
tests/test_utils/sftp_system_helpers.py | 2 +-
tests/test_utils/system_tests_class.py | 2 +-
tests/test_utils/terraform.py | 2 +-
tests/test_utils/www.py | 4 +-
tests/ti_deps/deps/test_dag_unpaused_dep.py | 4 +-
tests/ti_deps/deps/test_dagrun_exists_dep.py | 4 +-
tests/ti_deps/deps/test_dagrun_id_dep.py | 2 +-
tests/ti_deps/deps/test_not_in_retry_period_dep.py | 4 +-
.../ti_deps/deps/test_pool_slots_available_dep.py | 16 +-
tests/ti_deps/deps/test_prev_dagrun_dep.py | 10 +-
tests/ti_deps/deps/test_ready_to_reschedule_dep.py | 30 +-
tests/ti_deps/deps/test_runnable_exec_date_dep.py | 10 +-
tests/ti_deps/deps/test_task_concurrency.py | 2 +-
tests/ti_deps/deps/test_trigger_rule_dep.py | 30 +-
tests/timetables/test_interval_timetable.py | 2 +-
tests/triggers/test_temporal.py | 2 +-
tests/utils/log/test_json_formatter.py | 2 +-
tests/utils/log/test_log_reader.py | 32 +-
tests/utils/log/test_secrets_masker.py | 62 +-
tests/utils/test_cli_util.py | 40 +-
tests/utils/test_compression.py | 26 +-
tests/utils/test_dag_cycle.py | 62 +-
tests/utils/test_dates.py | 22 +-
tests/utils/test_db.py | 142 ++--
tests/utils/test_db_cleanup.py | 130 +--
tests/utils/test_docs.py | 24 +-
tests/utils/test_dot_renderer.py | 36 +-
tests/utils/test_email.py | 262 +++---
tests/utils/test_file.py | 24 +-
tests/utils/test_helpers.py | 78 +-
tests/utils/test_json.py | 10 +-
tests/utils/test_log_handlers.py | 26 +-
tests/utils/test_logging_mixin.py | 2 +-
tests/utils/test_module_loading.py | 6 +-
tests/utils/test_net.py | 16 +-
tests/utils/test_operator_helpers.py | 72 +-
tests/utils/test_operator_resources.py | 8 +-
tests/utils/test_process_utils.py | 24 +-
tests/utils/test_python_virtualenv.py | 26 +-
tests/utils/test_retries.py | 2 +-
tests/utils/test_serve_logs.py | 62 +-
tests/utils/test_sqlalchemy.py | 58 +-
tests/utils/test_state.py | 8 +-
tests/utils/test_task_group.py | 626 +++++++-------
.../test_task_handler_with_custom_formatter.py | 16 +-
tests/utils/test_timezone.py | 20 +-
tests/utils/test_types.py | 8 +-
tests/utils/test_weekday.py | 2 +-
tests/www/api/experimental/conftest.py | 10 +-
.../www/api/experimental/test_dag_runs_endpoint.py | 54 +-
tests/www/api/experimental/test_endpoints.py | 224 ++---
tests/www/test_app.py | 78 +-
tests/www/test_init_views.py | 4 +-
tests/www/test_security.py | 170 ++--
tests/www/test_utils.py | 106 +--
tests/www/test_validators.py | 20 +-
tests/www/views/conftest.py | 82 +-
tests/www/views/test_session.py | 10 +-
tests/www/views/test_views.py | 102 +--
tests/www/views/test_views_acl.py | 172 ++--
tests/www/views/test_views_base.py | 120 +--
tests/www/views/test_views_configuration.py | 40 +-
tests/www/views/test_views_connection.py | 124 +--
tests/www/views/test_views_custom_user_views.py | 36 +-
tests/www/views/test_views_dagrun.py | 6 +-
tests/www/views/test_views_dataset.py | 60 +-
tests/www/views/test_views_decorators.py | 24 +-
tests/www/views/test_views_extra_links.py | 38 +-
tests/www/views/test_views_graph_gantt.py | 20 +-
tests/www/views/test_views_grid.py | 318 ++++----
tests/www/views/test_views_home.py | 56 +-
tests/www/views/test_views_log.py | 102 +--
tests/www/views/test_views_mount.py | 10 +-
tests/www/views/test_views_pool.py | 32 +-
tests/www/views/test_views_rendered.py | 28 +-
tests/www/views/test_views_robots.py | 18 +-
tests/www/views/test_views_tasks.py | 702 ++++++++--------
tests/www/views/test_views_trigger_dag.py | 56 +-
tests/www/views/test_views_variable.py | 54 +-
1143 files changed, 34256 insertions(+), 34249 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 60faf6ebb0..138c8110de 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -155,6 +155,7 @@ repos:
name: Run black (python formatter) on core
args: [--config=./pyproject.toml, --skip-string-normalization]
exclude: ^airflow/_vendor/|^airflow/contrib/|^airflow/providers/
+ files: ^airflow/
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
@@ -162,6 +163,13 @@ repos:
name: Run black (python formatter) on providers
args: [--config=./pyproject.toml]
files: ^airflow/providers/
+ - repo: https://github.com/psf/black
+ rev: 22.3.0
+ hooks:
+ - id: black
+ name: Run black (python formatter) on other
+ args: [--config=./pyproject.toml]
+ exclude: ^airflow/
- repo: https://github.com/asottile/blacken-docs
rev: v1.12.1
hooks:
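
For readers following the hook changes above: with the new `files`/`exclude` patterns, the three black hooks now partition the repository between core, providers, and everything else. A rough Python sketch of that partition (assuming pre-commit's re.search matching of `files`/`exclude`; the helper names are ours, not pre-commit's):

    import re

    def runs_on_core(path: str) -> bool:
        # core hook: files ^airflow/, minus vendored/contrib/providers code
        return bool(re.search(r"^airflow/", path)) and not re.search(
            r"^airflow/_vendor/|^airflow/contrib/|^airflow/providers/", path
        )

    def runs_on_providers(path: str) -> bool:
        return bool(re.search(r"^airflow/providers/", path))

    def runs_on_other(path: str) -> bool:
        # the new hook added above: everything outside airflow/
        return not re.search(r"^airflow/", path)

    assert runs_on_core("airflow/models/dag.py")
    assert runs_on_providers("airflow/providers/google/cloud/hooks/gcs.py")
    assert runs_on_other("dev/airflow-github") and runs_on_other("tests/utils/test_db.py")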
diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst
index 53e7e58847..7fd5c6a919 100644
--- a/STATIC_CODE_CHECKS.rst
+++ b/STATIC_CODE_CHECKS.rst
@@ -134,6 +134,7 @@ require Breeze Docker image to be build locally.
+========================================================+==================================================================+=========+
| black | * Run black (python formatter) on core | |
| | * Run black (python formatter) on providers | |
+| | * Run black (python formatter) on other | |
+--------------------------------------------------------+------------------------------------------------------------------+---------+
| blacken-docs | Run black on python code blocks in documentation files | |
+--------------------------------------------------------+------------------------------------------------------------------+---------+
diff --git a/dev/airflow-github b/dev/airflow-github
index d9e4afbe77..d042252413 100755
--- a/dev/airflow-github
+++ b/dev/airflow-github
@@ -32,23 +32,23 @@ from github import Github
from github.Issue import Issue
from github.PullRequest import PullRequest
-GIT_COMMIT_FIELDS = ['id', 'author_name', 'author_email', 'date', 'subject', 'body']
-GIT_LOG_FORMAT = '%x1f'.join(['%h', '%an', '%ae', '%ad', '%s', '%b']) + '%x1e'
+GIT_COMMIT_FIELDS = ["id", "author_name", "author_email", "date", "subject", "body"]
+GIT_LOG_FORMAT = "%x1f".join(["%h", "%an", "%ae", "%ad", "%s", "%b"]) + "%x1e"
pr_title_re = re.compile(r".*\((#[0-9]{1,6})\)$")
STATUS_COLOR_MAP = {
- 'Closed': 'green',
- 'Open': 'red',
+ "Closed": "green",
+ "Open": "red",
}
-DEFAULT_SECTION_NAME = 'Uncategorized'
+DEFAULT_SECTION_NAME = "Uncategorized"
def get_commits_between(repo, previous_version, target_version):
- log_args = [f'--format={GIT_LOG_FORMAT}', previous_version + ".." + target_version]
+ log_args = [f"--format={GIT_LOG_FORMAT}", previous_version + ".." + target_version]
log = repo.git.log(*log_args)
- log = log.strip('\n\x1e').split("\x1e")
+ log = log.strip("\n\x1e").split("\x1e")
log = [row.strip().split("\x1f") for row in log]
return [dict(zip(GIT_COMMIT_FIELDS, row)) for row in log]
@@ -87,7 +87,7 @@ def get_commit_in_main_associated_with_pr(repo: git.Repo, issue: Issue) -> str |
def is_cherrypicked(repo: git.Repo, issue: Issue, previous_version: str | None = None) -> bool:
"""Check if a given issue is cherry-picked in the current branch or not"""
- log_args = ['--format=%H', f"--grep=#{issue.number}"]
+ log_args = ["--format=%H", f"--grep=#{issue.number}"]
if previous_version:
log_args.append(previous_version + "..")
log = repo.git.log(*log_args)
@@ -192,7 +192,7 @@ def print_changelog(sections):
print(section)
print('"' * len(section))
for line in lines:
- print('-', line)
+ print("-", line)
print()
@@ -206,22 +206,22 @@ def cli():
"""
-@cli.command(short_help='Compare a GitHub target version against git merges')
-@click.argument('target_version')
-@click.argument('github-token', envvar='GITHUB_TOKEN')
+@cli.command(short_help="Compare a GitHub target version against git merges")
+@click.argument("target_version")
+@click.argument("github-token", envvar="GITHUB_TOKEN")
@click.option(
- '--previous-version',
- 'previous_version',
+ "--previous-version",
+ "previous_version",
help="Specify the previous tag on the working branch to limit"
" searching for few commits to find the cherry-picked commits",
)
-@click.option('--unmerged', 'show_uncherrypicked_only', help="Show unmerged PRs only", is_flag=True)
+@click.option("--unmerged", "show_uncherrypicked_only", help="Show unmerged PRs only", is_flag=True)
def compare(target_version, github_token, previous_version=None, show_uncherrypicked_only=False):
repo = git.Repo(".", search_parent_directories=True)
github_handler = Github(github_token)
milestone_issues: list[Issue] = list(
- github_handler.search_issues(f"repo:apache/airflow milestone:\"Airflow {target_version}\"")
+ github_handler.search_issues(f'repo:apache/airflow milestone:"Airflow {target_version}"')
)
num_cherrypicked = 0
@@ -283,10 +283,10 @@ def compare(target_version, github_token, previous_version=None, show_uncherrypi
)
-@cli.command(short_help='Build a CHANGELOG grouped by GitHub Issue type')
-@click.argument('previous_version')
-@click.argument('target_version')
-@click.argument('github-token', envvar='GITHUB_TOKEN')
+@cli.command(short_help="Build a CHANGELOG grouped by GitHub Issue type")
+@click.argument("previous_version")
+@click.argument("target_version")
+@click.argument("github-token", envvar="GITHUB_TOKEN")
def changelog(previous_version, target_version, github_token):
repo = git.Repo(".", search_parent_directories=True)
# Get a list of issues/PRs that have been committed on the current branch.
@@ -296,23 +296,23 @@ def changelog(previous_version, target_version, github_token):
gh_repo = gh.get_repo("apache/airflow")
sections = defaultdict(list)
for commit in log:
- tickets = pr_title_re.findall(commit['subject'])
+ tickets = pr_title_re.findall(commit["subject"])
if tickets:
issue = gh_repo.get_issue(number=int(tickets[0][1:]))
issue_type = get_issue_type(issue)
- sections[issue_type].append(commit['subject'])
+ sections[issue_type].append(commit["subject"])
else:
- sections[DEFAULT_SECTION_NAME].append(commit['subject'])
+ sections[DEFAULT_SECTION_NAME].append(commit["subject"])
print_changelog(sections)
-@cli.command(short_help='Find merged PRs that still need to be categorized for the changelog')
-@click.argument('previous_version')
-@click.argument('target_version')
-@click.option('--show-skipped', is_flag=True)
-@click.option('--show-files', is_flag=True)
-@click.argument('github-token', envvar='GITHUB_TOKEN')
+@cli.command(short_help="Find merged PRs that still need to be categorized for the changelog")
+@click.argument("previous_version")
+@click.argument("target_version")
+@click.option("--show-skipped", is_flag=True)
+@click.option("--show-files", is_flag=True)
+@click.argument("github-token", envvar="GITHUB_TOKEN")
def needs_categorization(previous_version, target_version, show_skipped, show_files, github_token):
repo = git.Repo(".", search_parent_directories=True)
log = get_commits_between(repo, previous_version, target_version)
@@ -320,12 +320,12 @@ def needs_categorization(previous_version, target_version, show_skipped, show_fi
gh = Github(github_token)
gh_repo = gh.get_repo("apache/airflow")
for commit in log:
- tickets = pr_title_re.findall(commit['subject'])
+ tickets = pr_title_re.findall(commit["subject"])
if tickets:
issue = gh_repo.get_issue(number=int(tickets[0][1:]))
issue_type = get_issue_type(issue)
if issue_type == DEFAULT_SECTION_NAME:
- files = files_touched(repo, commit['id'])
+ files = files_touched(repo, commit["id"])
if is_core_commit(files):
print(f"{commit['subject']}: {issue.html_url}")
if show_files:
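
The hunks above rely on git log output delimited by the \x1f (field) and \x1e (record) control characters. A self-contained sketch of the same parsing, run against a fabricated log string rather than real git output:

    import re

    GIT_COMMIT_FIELDS = ["id", "author_name", "author_email", "date", "subject", "body"]
    pr_title_re = re.compile(r".*\((#[0-9]{1,6})\)$")

    # fabricated output: fields joined by \x1f, commits terminated by \x1e
    log = "abc123\x1fJane Doe\x1fjane@example.com\x1fSun Oct 23\x1fFix the thing (#27206)\x1fdetails\x1e"
    rows = [row.strip().split("\x1f") for row in log.strip("\n\x1e").split("\x1e")]
    commits = [dict(zip(GIT_COMMIT_FIELDS, row)) for row in rows]
    print(pr_title_re.findall(commits[0]["subject"]))  # -> ['#27206']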
diff --git a/dev/airflow-license b/dev/airflow-license
index a38edc8a2f..0f9cc7cfbb 100755
--- a/dev/airflow-license
+++ b/dev/airflow-license
@@ -27,20 +27,20 @@ import slugify
# order is important
_licenses = {
- 'MIT': [
- 'Permission is hereby granted free of charge',
- 'The above copyright notice and this permission notice shall',
+ "MIT": [
+ "Permission is hereby granted free of charge",
+ "The above copyright notice and this permission notice shall",
],
- 'BSD-3': [
- 'Redistributions of source code must retain the above copyright',
- 'Redistributions in binary form must reproduce the above copyright',
- 'specific prior written permission',
+ "BSD-3": [
+ "Redistributions of source code must retain the above copyright",
+ "Redistributions in binary form must reproduce the above copyright",
+ "specific prior written permission",
],
- 'BSD-2': [
- 'Redistributions of source code must retain the above copyright',
- 'Redistributions in binary form must reproduce the above copyright',
+ "BSD-2": [
+ "Redistributions of source code must retain the above copyright",
+ "Redistributions in binary form must reproduce the above copyright",
],
- 'AL': ['http://www.apache.org/licenses/LICENSE-2.0'],
+ "AL": ["http://www.apache.org/licenses/LICENSE-2.0"],
}
diff --git a/dev/assign_cherry_picked_prs_with_milestone.py b/dev/assign_cherry_picked_prs_with_milestone.py
index 4442c0dd06..4696038f1b 100755
--- a/dev/assign_cherry_picked_prs_with_milestone.py
+++ b/dev/assign_cherry_picked_prs_with_milestone.py
@@ -52,7 +52,7 @@ DOC_ONLY_CHANGES_FILE = "doc-only-changes.txt"
EXCLUDED_CHANGES_FILE = "excluded-changes.txt"
-@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500})
+@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
...
@@ -95,7 +95,7 @@ option_github_token = click.option(
Can be generated with:
https://github.com/settings/tokens/new?description=Read%20Write%20isssues&scopes=repo"""
),
- envvar='GITHUB_TOKEN',
+ envvar="GITHUB_TOKEN",
)
option_limit_pr_count = click.option(
@@ -185,7 +185,7 @@ def get_git_log_command(
git_cmd.append(f"{from_commit}...{to_commit}")
elif from_commit:
git_cmd.append(from_commit)
- git_cmd.extend(['--', '.'])
+ git_cmd.extend(["--", "."])
if verbose:
console.print(f"Command to run: '{' '.join(git_cmd)}'")
return git_cmd
@@ -214,7 +214,7 @@ def get_change_from_line(line: str) -> Change:
short_hash=split_line[1],
date=split_line[2],
message=message,
- message_without_backticks=message.replace("`", "'").replace("&#39;", "'").replace('&amp;', "&"),
+ message_without_backticks=message.replace("`", "'").replace("&#39;", "'").replace("&amp;", "&"),
pr=int(pr) if pr else None,
)
@@ -293,7 +293,7 @@ def assign_prs(
if pr_number is None:
# Should not happen but MyPy is not happy
continue
- console.print('-' * 80)
+ console.print("-" * 80)
console.print(
f"\n >>>> Retrieving PR#{pr_number}: https://github.com/apache/airflow/pull/{pr_number}"
)
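
The `message_without_backticks` line above unescapes HTML entities that GitHub can return in commit subjects (the mail rendering of this diff had decoded them; the entity names are the standard `&#39;`/`&amp;` ones). A small illustration with a made-up subject:

    # fabricated subject line containing backticks and HTML entities
    message = "Add `fast` path for dag&#39;s schedule &amp; catchup (#12345)"
    cleaned = message.replace("`", "'").replace("&#39;", "'").replace("&amp;", "&")
    print(cleaned)  # -> Add 'fast' path for dag's schedule & catchup (#12345)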
diff --git a/dev/breeze/README.md b/dev/breeze/README.md
index b5396efd53..67ed31c85a 100644
--- a/dev/breeze/README.md
+++ b/dev/breeze/README.md
@@ -52,6 +52,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT.
---------------------------------------------------------------------------------------------------------
-Package config hash: 77e3176fb14ec89ad2d40b5da6c4e4834e75186e7628d9a490662437e1ae2133577f3baf8934cee6cadd5fb6fdb4cc38ef86607c4a187a0d4d56215f3be86184
+Package config hash: 3097f9e408686a68b2736d6c6d96866f9eb41525b83cda3b8ed29a13bd38fc8da9a6bc3b74444467054f31927107e6a640504b9e011bd1603dda5d7605a4267e
---------------------------------------------------------------------------------------------------------
diff --git a/dev/breeze/pyproject.toml b/dev/breeze/pyproject.toml
index 0fb2fed307..d3fc07fb15 100644
--- a/dev/breeze/pyproject.toml
+++ b/dev/breeze/pyproject.toml
@@ -17,4 +17,3 @@
[tool.black]
line-length = 110
target-version = ['py37', 'py38', 'py39', 'py310']
-skip-string-normalization = true
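
Removing `skip-string-normalization` here is what turns quote normalization on for dev/breeze: black normalizes string quotes by default. A minimal sketch using black's Python API (black.Mode and black.format_str, assuming a recent black release):

    import black

    code = "EXCLUDED_CHANGES_FILE = 'excluded-changes.txt'\n"

    # default Mode normalizes quotes (what dev/breeze gets after this change)
    print(black.format_str(code, mode=black.Mode(line_length=110)))
    # EXCLUDED_CHANGES_FILE = "excluded-changes.txt"

    # the old behaviour, equivalent to skip-string-normalization = true
    print(black.format_str(code, mode=black.Mode(line_length=110, string_normalization=False)))
    # EXCLUDED_CHANGES_FILE = 'excluded-changes.txt'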
diff --git a/dev/breeze/src/airflow_breeze/breeze.py b/dev/breeze/src/airflow_breeze/breeze.py
index 66b837f7c4..abac73f0f3 100755
--- a/dev/breeze/src/airflow_breeze/breeze.py
+++ b/dev/breeze/src/airflow_breeze/breeze.py
@@ -45,5 +45,5 @@ main.add_command(prod_image)
main.add_command(setup)
main.add_command(release_management)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/dev/breeze/src/airflow_breeze/commands/ci_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_commands.py
index bff3407c3c..8e2549542b 100644
--- a/dev/breeze/src/airflow_breeze/commands/ci_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/ci_commands.py
@@ -64,7 +64,7 @@ from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, MSSQL_TMP_DIR_
from airflow_breeze.utils.run_utils import run_command
-@click.group(cls=BreezeGroup, name='ci', help='Tools that CI workflows use to cleanup/manage CI environment')
+@click.group(cls=BreezeGroup, name="ci", help="Tools that CI workflows use to cleanup/manage CI environment")
def ci_group():
pass
@@ -108,7 +108,7 @@ def resource_check(verbose: bool, dry_run: bool):
check_docker_resources(shell_params.airflow_image_name, verbose=verbose, dry_run=dry_run)
-HOME_DIR = Path(os.path.expanduser('~')).resolve()
+HOME_DIR = Path(os.path.expanduser("~")).resolve()
DIRECTORIES_TO_FIX = [
AIRFLOW_SOURCES_ROOT,
@@ -135,11 +135,11 @@ def fix_ownership_for_file(file: Path, dry_run: bool, verbose: bool):
def fix_ownership_for_path(path: Path, dry_run: bool, verbose: bool):
if path.is_dir():
- for p in Path(path).rglob('*'):
- if p.owner == 'root':
+ for p in Path(path).rglob("*"):
+ if p.owner == "root":
fix_ownership_for_file(p, dry_run=dry_run, verbose=verbose)
else:
- if path.owner == 'root':
+ if path.owner == "root":
fix_ownership_for_file(path, dry_run=dry_run, verbose=verbose)
@@ -150,17 +150,17 @@ def fix_ownership_without_docker(dry_run: bool, verbose: bool):
@ci_group.command(name="fix-ownership", help="Fix ownership of source files to be same as host user.")
@click.option(
- '--use-sudo',
+ "--use-sudo",
is_flag=True,
help="Use sudo instead of docker image to fix the ownership. You need to be a `sudoer` to run it",
- envvar='USE_SUDO',
+ envvar="USE_SUDO",
)
@option_github_repository
@option_verbose
@option_dry_run
def fix_ownership(github_repository: str, use_sudo: bool, verbose: bool, dry_run: bool):
system = platform.system().lower()
- if system != 'linux':
+ if system != "linux":
get_console().print(
f"[warning]You should only need to run fix-ownership on Linux and your system is {system}"
)
@@ -204,32 +204,32 @@ def get_changed_files(commit_ref: str | None, dry_run: bool, verbose: bool) -> t
name="selective-check", help="Checks what kind of tests should be run for an incoming commit."
)
@click.option(
- '--commit-ref',
+ "--commit-ref",
help="Commit-ish reference to the commit that should be checked",
- envvar='COMMIT_REF',
+ envvar="COMMIT_REF",
)
@click.option(
- '--pr-labels',
+ "--pr-labels",
help="Python array formatted PR labels assigned to the PR",
default="",
envvar="PR_LABELS",
)
@click.option(
- '--default-branch',
+ "--default-branch",
help="Branch against which the PR should be run",
default="main",
envvar="DEFAULT_BRANCH",
show_default=True,
)
@click.option(
- '--default-constraints-branch',
+ "--default-constraints-branch",
help="Constraints Branch against which the PR should be run",
default="constraints-main",
envvar="DEFAULT_CONSTRAINTS_BRANCH",
show_default=True,
)
@click.option(
- '--github-event-name',
+ "--github-event-name",
type=BetterChoice(github_events()),
default=github_events()[0],
help="Name of the GitHub event that triggered the check",
@@ -302,7 +302,7 @@ class WorkflowInfo(NamedTuple):
yield get_ga_output(name="pr_number", value=str(self.pr_number) if self.pr_number else "")
yield get_ga_output(name="event_name", value=str(self.event_name))
yield get_ga_output(name="runs-on", value=self.get_runs_on())
- yield get_ga_output(name='in-workflow-build', value=self.in_workflow_build())
+ yield get_ga_output(name="in-workflow-build", value=self.in_workflow_build())
yield get_ga_output(name="build-job-description", value=self.get_build_job_description())
yield get_ga_output(name="canary-run", value=self.is_canary_run())
yield get_ga_output(name="run-coverage", value=self.run_coverage())
@@ -326,13 +326,13 @@ class WorkflowInfo(NamedTuple):
return "false"
def get_build_job_description(self) -> str:
- if self.in_workflow_build() == 'true':
+ if self.in_workflow_build() == "true":
return "Build"
return "Skip Build (look in pull_request_target)"
def is_canary_run(self) -> str:
if (
- self.event_name == 'push'
+ self.event_name == "push"
and self.head_repo == "apache/airflow"
and self.ref_name
and (self.ref_name == "main" or TEST_BRANCH_MATCHER.match(self.ref_name))
@@ -341,7 +341,7 @@ class WorkflowInfo(NamedTuple):
return "false"
def run_coverage(self) -> str:
- if self.event_name == 'push' and self.head_repo == "apache/airflow" and self.ref == "refs/head/main":
+ if self.event_name == "push" and self.head_repo == "apache/airflow" and self.ref == "refs/head/main":
return "true"
return "false"
@@ -359,26 +359,26 @@ def workflow_info(context: str) -> WorkflowInfo:
ref_name = ctx.get("ref_name")
ref = ctx.get("ref")
if event_name == "pull_request":
- event = ctx.get('event')
+ event = ctx.get("event")
if event:
- pr = event.get('pull_request')
+ pr = event.get("pull_request")
if pr:
- labels = pr.get('labels')
+ labels = pr.get("labels")
if labels:
for label in labels:
- pull_request_labels.append(label['name'])
+ pull_request_labels.append(label["name"])
target_repo = pr["base"]["repo"]["full_name"]
head_repo = pr["head"]["repo"]["full_name"]
pr_number = pr["number"]
- elif event_name == 'push':
+ elif event_name == "push":
target_repo = ctx["repository"]
head_repo = ctx["repository"]
event_name = ctx["event_name"]
- elif event_name == 'schedule':
+ elif event_name == "schedule":
target_repo = ctx["repository"]
head_repo = ctx["repository"]
event_name = ctx["event_name"]
- elif event_name == 'pull_request_target':
+ elif event_name == "pull_request_target":
target_repo = ctx["repository"]
head_repo = ctx["repository"]
event_name = ctx["event_name"]
@@ -401,12 +401,12 @@ def workflow_info(context: str) -> WorkflowInfo:
help="Retrieve information about current workflow in the CI"
"and produce github actions output extracted from it.",
)
-@click.option('--github-context', help="JSON-formatted github context", envvar='GITHUB_CONTEXT')
+@click.option("--github-context", help="JSON-formatted github context", envvar="GITHUB_CONTEXT")
@click.option(
- '--github-context-input',
+ "--github-context-input",
help="file input (might be `-`) with JSON-formatted github context",
- type=click.File('rt'),
- envvar='GITHUB_CONTEXT_INPUT',
+ type=click.File("rt"),
+ envvar="GITHUB_CONTEXT_INPUT",
)
def get_workflow_info(github_context: str, github_context_input: StringIO):
if github_context and github_context_input:
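
The `workflow_info` changes above walk a GitHub Actions context dictionary to collect PR labels. A tiny illustration against a fabricated context (the values are invented; the shape follows the code above):

    ctx = {
        "event_name": "pull_request",
        "event": {
            "pull_request": {
                "labels": [{"name": "full tests needed"}],
                "base": {"repo": {"full_name": "apache/airflow"}},
                "head": {"repo": {"full_name": "example/airflow"}},
                "number": 27206,
            }
        },
    }
    # same extraction as the loop above, condensed into a comprehension
    labels = [label["name"] for label in ctx["event"]["pull_request"]["labels"]]
    print(labels)  # -> ['full tests needed']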
diff --git a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
index 34119ee4de..946b6f8b68 100644
--- a/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/ci_image_commands.py
@@ -99,7 +99,7 @@ from airflow_breeze.utils.run_utils import (
@click.group(
- cls=BreezeGroup, name='ci-image', help="Tools that developers can use to manually manage CI images"
+ cls=BreezeGroup, name="ci-image", help="Tools that developers can use to manually manage CI images"
)
def ci_image():
pass
@@ -164,7 +164,7 @@ def start_building(params: BuildCiParams, dry_run: bool, verbose: bool):
make_sure_builder_configured(params=params, dry_run=dry_run, verbose=verbose)
-@ci_image.command(name='build')
+@ci_image.command(name="build")
@option_github_repository
@option_verbose
@option_dry_run
@@ -228,7 +228,7 @@ def build(
perform_environment_checks(verbose=verbose)
parameters_passed = filter_out_none(**kwargs)
- parameters_passed['force_build'] = True
+ parameters_passed["force_build"] = True
fix_group_permissions(verbose=verbose)
if run_in_parallel:
python_version_list = get_python_version_list(python_versions)
@@ -255,7 +255,7 @@ def build(
run_build(ci_image_params=params)
-@ci_image.command(name='pull')
+@ci_image.command(name="pull")
@option_verbose
@option_dry_run
@option_python
@@ -272,7 +272,7 @@ def build(
@option_image_tag_for_pulling
@option_include_success_outputs
@option_tag_as_latest
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def pull(
verbose: bool,
dry_run: bool,
@@ -336,7 +336,7 @@ def pull(
@ci_image.command(
- name='verify',
+ name="verify",
context_settings=dict(
ignore_unknown_options=True,
allow_extra_args=True,
@@ -349,7 +349,7 @@ def pull(
@option_image_tag_for_verifying
@option_image_name
@option_pull
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def verify(
verbose: bool,
dry_run: bool,
@@ -374,7 +374,7 @@ def verify(
output=None,
verbose=verbose,
dry_run=dry_run,
- image_type='CI',
+ image_type="CI",
slim_image=False,
extra_pytest_args=extra_pytest_args,
)
@@ -425,20 +425,20 @@ def should_we_run_the_build(build_ci_params: BuildCiParams) -> bool:
"before continuing.[/]\nCheck this link to find out how "
"https://github.com/apache/airflow/blob/main/CONTRIBUTING.rst#id15\n"
)
- get_console().print('[error]Exiting the process[/]\n')
+ get_console().print("[error]Exiting the process[/]\n")
sys.exit(1)
elif answer == Answer.NO:
instruct_build_image(build_ci_params.python)
return False
else: # users_status == Answer.QUIT:
- get_console().print('\n[warning]Quitting the process[/]\n')
+ get_console().print("\n[warning]Quitting the process[/]\n")
sys.exit()
except TimeoutOccurred:
- get_console().print('\nTimeout. Considering your response as No\n')
+ get_console().print("\nTimeout. Considering your response as No\n")
instruct_build_image(build_ci_params.python)
return False
except Exception as e:
- get_console().print(f'\nTerminating the process on {e}')
+ get_console().print(f"\nTerminating the process on {e}")
sys.exit(1)
@@ -488,7 +488,7 @@ def run_build_ci_image(
else:
if ci_image_params.empty_image:
env = os.environ.copy()
- env['DOCKER_BUILDKIT'] = "1"
+ env["DOCKER_BUILDKIT"] = "1"
get_console(output=output).print(
f"\n[info]Building empty CI Image for Python {ci_image_params.python}\n"
)
@@ -605,11 +605,11 @@ def rebuild_or_pull_ci_image_if_needed(
return
if build_ci_image_check_cache.exists():
if verbose:
- get_console().print(f'[info]{command_params.image_type} image already built locally.[/]')
+ get_console().print(f"[info]{command_params.image_type} image already built locally.[/]")
else:
get_console().print(
- f'[warning]{command_params.image_type} image was never built locally or deleted. '
- 'Forcing build.[/]'
+ f"[warning]{command_params.image_type} image was never built locally or deleted. "
+ "Forcing build.[/]"
)
ci_image_params.force_build = True
if check_if_image_building_is_needed(
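
The `TimeoutOccurred` handler above follows a prompt-with-deadline pattern: silence is treated as "No". A minimal sketch of that pattern with the same `inputimeout` package (the prompt text here is ours):

    from inputimeout import TimeoutOccurred, inputimeout

    try:
        answer = inputimeout(prompt="Build the image? (y/n): ", timeout=20)
        proceed = answer.upper() in ["Y", "YES"]
    except TimeoutOccurred:
        proceed = False  # no response -> considered as "No", as in the handler above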
diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py
index 949b487169..6efd293033 100644
--- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py
@@ -130,7 +130,7 @@ class TimerThread(threading.Thread):
@option_answer
@option_max_time
@option_include_mypy_volume
-@click.argument('extra-args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra-args", nargs=-1, type=click.UNPROCESSED)
def shell(
verbose: bool,
dry_run: bool,
@@ -163,7 +163,7 @@ def shell(
get_console().print(f"\n[success]Root of Airflow Sources = {AIRFLOW_SOURCES_ROOT}[/]\n")
if max_time:
TimerThread(max_time=max_time).start()
- set_forced_answer('yes')
+ set_forced_answer("yes")
enter_shell(
verbose=verbose,
dry_run=dry_run,
@@ -192,7 +192,7 @@ def shell(
@option_verbose
-@main.command(name='start-airflow')
+@main.command(name="start-airflow")
@option_dry_run
@option_python
@option_platform_single
@@ -214,20 +214,20 @@ def shell(
@option_integration
@option_image_tag_for_running
@click.option(
- '--skip-asset-compilation',
+ "--skip-asset-compilation",
help="Skips compilation of assets when starting airflow even if the content of www changed "
"(mutually exclusive with --dev-mode).",
is_flag=True,
)
@click.option(
- '--dev-mode',
+ "--dev-mode",
help="Starts webserver in dev mode (assets are always recompiled in this case when starting) "
"(mutually exclusive with --skip-asset-compilation).",
is_flag=True,
)
@option_db_reset
@option_answer
-@click.argument('extra-args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra-args", nargs=-1, type=click.UNPROCESSED)
def start_airflow(
verbose: bool,
dry_run: bool,
@@ -262,7 +262,7 @@ def start_airflow(
"""
if dev_mode and skip_asset_compilation:
get_console().print(
- '[warning]You cannot skip asset compilation in dev mode! Assets will be compiled!'
+ "[warning]You cannot skip asset compilation in dev mode! Assets will be compiled!"
)
skip_asset_compilation = True
if use_airflow_version is None and not skip_asset_compilation:
@@ -297,26 +297,26 @@ def start_airflow(
)
-@main.command(name='build-docs')
+@main.command(name="build-docs")
@option_verbose
@option_dry_run
@option_github_repository
-@click.option('-d', '--docs-only', help="Only build documentation.", is_flag=True)
-@click.option('-s', '--spellcheck-only', help="Only run spell checking.", is_flag=True)
+@click.option("-d", "--docs-only", help="Only build documentation.", is_flag=True)
+@click.option("-s", "--spellcheck-only", help="Only run spell checking.", is_flag=True)
@click.option(
- '--package-filter',
+ "--package-filter",
help="List of packages to consider.",
type=NotVerifiedBetterChoice(get_available_documentation_packages()),
multiple=True,
)
@click.option(
- '--clean-build',
+ "--clean-build",
help="Clean inventories of Inter-Sphinx documentation and generated APIs and sphinx artifacts "
"before the build - useful for a clean build.",
is_flag=True,
)
@click.option(
- '--for-production',
+ "--for-production",
help="Builds documentation for official release i.e. all links point to stable version. "
"Implies --clean-build",
is_flag=True,
@@ -340,7 +340,7 @@ def build_docs(
rebuild_or_pull_ci_image_if_needed(command_params=params, dry_run=dry_run, verbose=verbose)
if clean_build:
docs_dir = AIRFLOW_SOURCES_ROOT / "docs"
- for dir_name in ['_build', "_doctrees", '_inventory_cache', '_api']:
+ for dir_name in ["_build", "_doctrees", "_inventory_cache", "_api"]:
for dir in docs_dir.rglob(dir_name):
get_console().print(f"[info]Removing {dir}")
shutil.rmtree(dir, ignore_errors=True)
@@ -378,26 +378,26 @@ def build_docs(
),
)
@click.option(
- '-t',
- '--type',
+ "-t",
+ "--type",
help="Type(s) of the static checks to run (multiple can be added).",
type=BetterChoice(PRE_COMMIT_LIST),
multiple=True,
)
-@click.option('-a', '--all-files', help="Run checks on all files.", is_flag=True)
-@click.option('-f', '--file', help="List of files to run the checks on.", type=click.Path(), multiple=True)
+@click.option("-a", "--all-files", help="Run checks on all files.", is_flag=True)
+@click.option("-f", "--file", help="List of files to run the checks on.", type=click.Path(), multiple=True)
@click.option(
- '-s', '--show-diff-on-failure', help="Show diff for files modified by the checks.", is_flag=True
+ "-s", "--show-diff-on-failure", help="Show diff for files modified by the checks.", is_flag=True
)
@click.option(
- '-c',
- '--last-commit',
+ "-c",
+ "--last-commit",
help="Run checks for all files in last commit. Mutually exclusive with --commit-ref.",
is_flag=True,
)
@click.option(
- '-r',
- '--commit-ref',
+ "-r",
+ "--commit-ref",
help="Run checks for this commit reference only "
"(can be any git commit-ish reference). "
"Mutually exclusive with --last-commit.",
@@ -405,7 +405,7 @@ def build_docs(
@option_verbose
@option_dry_run
@option_github_repository
-@click.argument('precommit_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("precommit_args", nargs=-1, type=click.UNPROCESSED)
def static_checks(
verbose: bool,
dry_run: bool,
@@ -420,7 +420,7 @@ def static_checks(
):
assert_pre_commit_installed(verbose=verbose)
perform_environment_checks(verbose=verbose)
- command_to_execute = [sys.executable, "-m", "pre_commit", 'run']
+ command_to_execute = [sys.executable, "-m", "pre_commit", "run"]
if last_commit and commit_ref:
get_console().print("\n[error]You cannot specify both --last-commit and --commit-ref[/]\n")
sys.exit(1)
@@ -442,7 +442,7 @@ def static_checks(
if precommit_args:
command_to_execute.extend(precommit_args)
env = os.environ.copy()
- env['GITHUB_REPOSITORY'] = github_repository
+ env["GITHUB_REPOSITORY"] = github_repository
static_checks_result = run_command(
command_to_execute,
verbose=verbose,
@@ -453,7 +453,7 @@ def static_checks(
env=env,
)
if static_checks_result.returncode != 0:
- if os.environ.get('CI'):
+ if os.environ.get("CI"):
get_console().print("[error]There were errors during pre-commit check. They should be fixed[/]")
sys.exit(static_checks_result.returncode)
@@ -496,7 +496,7 @@ def compile_www_assets(
)
def stop(verbose: bool, dry_run: bool, preserve_volumes: bool):
perform_environment_checks(verbose=verbose)
- command_to_execute = [*DOCKER_COMPOSE_COMMAND, 'down', "--remove-orphans"]
+ command_to_execute = [*DOCKER_COMPOSE_COMMAND, "down", "--remove-orphans"]
if not preserve_volumes:
command_to_execute.append("--volumes")
shell_params = ShellParams(verbose=verbose, backend="all", include_mypy_volume=True)
@@ -504,10 +504,10 @@ def stop(verbose: bool, dry_run: bool, preserve_volumes: bool):
run_command(command_to_execute, verbose=verbose, dry_run=dry_run, env=env_variables)
-@main.command(name='exec', help='Joins the interactive shell of running airflow container.')
+@main.command(name="exec", help="Joins the interactive shell of running airflow container.")
@option_verbose
@option_dry_run
-@click.argument('exec_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("exec_args", nargs=-1, type=click.UNPROCESSED)
def exec(verbose: bool, dry_run: bool, exec_args: tuple):
perform_environment_checks(verbose=verbose)
container_running = find_airflow_container(verbose, dry_run)
@@ -546,12 +546,12 @@ def enter_shell(**kwargs) -> RunCommandResult:
* executes the command to drop the user to Breeze shell
"""
- verbose = kwargs['verbose']
- dry_run = kwargs['dry_run']
+ verbose = kwargs["verbose"]
+ dry_run = kwargs["dry_run"]
perform_environment_checks(verbose=verbose)
- if read_from_cache_file('suppress_asciiart') is None:
+ if read_from_cache_file("suppress_asciiart") is None:
get_console().print(ASCIIART, style=ASCIIART_STYLE)
- if read_from_cache_file('suppress_cheatsheet') is None:
+ if read_from_cache_file("suppress_cheatsheet") is None:
get_console().print(CHEATSHEET, style=CHEATSHEET_STYLE)
enter_shell_params = ShellParams(**filter_out_none(**kwargs))
rebuild_or_pull_ci_image_if_needed(command_params=enter_shell_params, dry_run=dry_run, verbose=verbose)
@@ -572,17 +572,17 @@ def run_shell(verbose: bool, dry_run: bool, shell_params: ShellParams) -> RunCom
:param shell_params: parameters of the execution
"""
shell_params.print_badge_info()
- cmd = [*DOCKER_COMPOSE_COMMAND, 'run', '--service-ports', "-e", "BREEZE", '--rm', 'airflow']
+ cmd = [*DOCKER_COMPOSE_COMMAND, "run", "--service-ports", "-e", "BREEZE", "--rm", "airflow"]
cmd_added = shell_params.command_passed
env_variables = get_env_variables_for_docker_commands(shell_params)
if cmd_added is not None:
- cmd.extend(['-c', cmd_added])
+ cmd.extend(["-c", cmd_added])
if "arm64" in DOCKER_DEFAULT_PLATFORM:
if shell_params.backend == "mysql":
- get_console().print('\n[error]MySQL is not supported on ARM architecture.[/]\n')
+ get_console().print("\n[error]MySQL is not supported on ARM architecture.[/]\n")
sys.exit(1)
if shell_params.backend == "mssql":
- get_console().print('\n[error]MSSQL is not supported on ARM architecture[/]\n')
+ get_console().print("\n[error]MSSQL is not supported on ARM architecture[/]\n")
sys.exit(1)
command_result = run_command(
cmd, verbose=verbose, dry_run=dry_run, env=env_variables, text=True, check=False
@@ -597,7 +597,7 @@ def run_shell(verbose: bool, dry_run: bool, shell_params: ShellParams) -> RunCom
def stop_exec_on_error(returncode: int):
- get_console().print('\n[error]ERROR in finding the airflow docker-compose process id[/]\n')
+ get_console().print("\n[error]ERROR in finding the airflow docker-compose process id[/]\n")
sys.exit(returncode)
@@ -606,7 +606,7 @@ def find_airflow_container(verbose, dry_run) -> str | None:
check_docker_resources(exec_shell_params.airflow_image_name, verbose=verbose, dry_run=dry_run)
exec_shell_params.print_badge_info()
env_variables = get_env_variables_for_docker_commands(exec_shell_params)
- cmd = [*DOCKER_COMPOSE_COMMAND, 'ps', '--all', '--filter', 'status=running', 'airflow']
+ cmd = [*DOCKER_COMPOSE_COMMAND, "ps", "--all", "--filter", "status=running", "airflow"]
docker_compose_ps_command = run_command(
cmd, verbose=verbose, dry_run=dry_run, text=True, capture_output=True, env=env_variables, check=False
)
@@ -620,10 +620,10 @@ def find_airflow_container(verbose, dry_run) -> str | None:
return None
output = docker_compose_ps_command.stdout
- container_info = output.strip().split('\n')
+ container_info = output.strip().split("\n")
if container_info:
- container_running = container_info[-1].split(' ')[0]
- if container_running.startswith('-'):
+ container_running = container_info[-1].split(" ")[0]
+ if container_running.startswith("-"):
# On docker-compose v1 we get '--------' as output here
stop_exec_on_error(docker_compose_ps_command.returncode)
return container_running
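
`find_airflow_container` above extracts the container name as the first column of the last `docker-compose ps` output line. The same parsing against fabricated output:

    # fabricated `docker-compose ps` output
    output = "NAME                COMMAND        STATE\nairflow_airflow_1   /entrypoint    Up\n"
    container_info = output.strip().split("\n")
    container_running = container_info[-1].split(" ")[0]
    print(container_running)  # -> airflow_airflow_1
    # docker-compose v1 prints a '--------' ruler instead, hence the startswith("-") check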
diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
index dbd818f794..0d73b955eb 100644
--- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
@@ -98,115 +98,115 @@ PARALLEL_PYTEST_ARGS = [
]
-@click.group(cls=BreezeGroup, name='k8s', help='Tools that developers use to run Kubernetes tests')
+@click.group(cls=BreezeGroup, name="k8s", help="Tools that developers use to run Kubernetes tests")
def kubernetes_group():
pass
option_executor = click.option(
- '--executor',
- help='Executor to use for a kubernetes cluster.',
+ "--executor",
+ help="Executor to use for a kubernetes cluster.",
type=CacheableChoice(ALLOWED_EXECUTORS),
show_default=True,
default=CacheableDefault(ALLOWED_EXECUTORS[0]),
- envvar='EXECUTOR',
+ envvar="EXECUTOR",
)
option_kubernetes_version = click.option(
- '--kubernetes-version',
- help='Kubernetes version used to create the KinD cluster of.',
+ "--kubernetes-version",
+ help="Kubernetes version used to create the KinD cluster of.",
type=CacheableChoice(ALLOWED_KUBERNETES_VERSIONS),
show_default=True,
default=CacheableDefault(ALLOWED_KUBERNETES_VERSIONS[0]),
- envvar='KUBERNETES_VERSION',
+ envvar="KUBERNETES_VERSION",
)
option_image_tag = click.option(
- '-t',
- '--image-tag',
- help='Image tag used to build K8S image from.',
- default='latest',
+ "-t",
+ "--image-tag",
+ help="Image tag used to build K8S image from.",
+ default="latest",
show_default=True,
- envvar='IMAGE_TAG',
+ envvar="IMAGE_TAG",
)
option_wait_time_in_seconds = click.option(
- '--wait-time-in-seconds',
+ "--wait-time-in-seconds",
help="Wait for Airflow webserver for specified number of seconds.",
type=click.IntRange(0),
default=120,
- envvar='WAIT_TIME_IN_SECONDS',
+ envvar="WAIT_TIME_IN_SECONDS",
)
option_wait_time_in_seconds_0_default = click.option(
- '--wait-time-in-seconds',
+ "--wait-time-in-seconds",
help="Wait for Airflow webserver for specified number of seconds.",
type=click.IntRange(0),
default=0,
- envvar='WAIT_TIME_IN_SECONDS',
+ envvar="WAIT_TIME_IN_SECONDS",
)
option_force_recreate_cluster = click.option(
- '--force-recreate-cluster',
+ "--force-recreate-cluster",
help="Force recreation of the cluster even if it is already created.",
is_flag=True,
- envvar='FORCE_RECREATE_CLUSTER',
+ envvar="FORCE_RECREATE_CLUSTER",
)
option_force_venv_setup = click.option(
- '--force-venv-setup',
+ "--force-venv-setup",
help="Force recreation of the virtualenv.",
is_flag=True,
- envvar='FORCE_VENV_SETUP',
+ envvar="FORCE_VENV_SETUP",
)
option_rebuild_base_image = click.option(
- '--rebuild-base-image',
+ "--rebuild-base-image",
help="Rebuilds base Airflow image before building K8S image.",
is_flag=True,
- envvar='REBUILD_BASE_IMAGE',
+ envvar="REBUILD_BASE_IMAGE",
)
option_kubernetes_versions = click.option(
- '--kubernetes-versions',
- help='Kubernetes versions used to run in parallel (space separated).',
+ "--kubernetes-versions",
+ help="Kubernetes versions used to run in parallel (space separated).",
type=str,
show_default=True,
default=" ".join(ALLOWED_KUBERNETES_VERSIONS),
- envvar='KUBERNETES_VERSIONS',
+ envvar="KUBERNETES_VERSIONS",
)
option_upgrade = click.option(
- '--upgrade',
+ "--upgrade",
help="Upgrade Helm Chart rather than installing it.",
is_flag=True,
- envvar='UPGRADE',
+ envvar="UPGRADE",
)
option_parallelism_cluster = click.option(
- '--parallelism',
+ "--parallelism",
help="Maximum number of processes to use while running the operation in parallel for cluster operations.",
type=click.IntRange(1, max(1, mp.cpu_count() // 4) if not generating_command_images() else 4),
default=max(1, mp.cpu_count() // 4) if not generating_command_images() else 2,
- envvar='PARALLELISM',
+ envvar="PARALLELISM",
show_default=True,
)
-option_all = click.option('--all', help="Apply it to all created clusters", is_flag=True, envvar="ALL")
+option_all = click.option("--all", help="Apply it to all created clusters", is_flag=True, envvar="ALL")
-K8S_CLUSTER_CREATE_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*|.*Connecting to localhost.*'
-K8S_UPLOAD_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*'
-K8S_CONFIGURE_CLUSTER_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*'
-K8S_DEPLOY_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*'
-K8S_TEST_PROGRESS_REGEXP = r'.*airflow-python-[0-9.]+-v[0-9.].*|^kubernetes_tests/.*'
-PREVIOUS_LINE_K8S_TEST_REGEXP = r'^kubernetes_tests/.*'
+K8S_CLUSTER_CREATE_PROGRESS_REGEXP = r".*airflow-python-[0-9.]+-v[0-9.].*|.*Connecting to localhost.*"
+K8S_UPLOAD_PROGRESS_REGEXP = r".*airflow-python-[0-9.]+-v[0-9.].*"
+K8S_CONFIGURE_CLUSTER_PROGRESS_REGEXP = r".*airflow-python-[0-9.]+-v[0-9.].*"
+K8S_DEPLOY_PROGRESS_REGEXP = r".*airflow-python-[0-9.]+-v[0-9.].*"
+K8S_TEST_PROGRESS_REGEXP = r".*airflow-python-[0-9.]+-v[0-9.].*|^kubernetes_tests/.*"
+PREVIOUS_LINE_K8S_TEST_REGEXP = r"^kubernetes_tests/.*"
COMPLETE_TEST_REGEXP = (
- r'\s*#(\d*) |'
- r'.*airflow-python-[0-9.]+-v[0-9.].*|'
- r'.*Connecting to localhost.*|'
- r'^kubernetes_tests/.*|'
- r'.*Error during running tests.*|'
- r'.*Successfully run tests.*'
+ r"\s*#(\d*) |"
+ r".*airflow-python-[0-9.]+-v[0-9.].*|"
+ r".*Connecting to localhost.*|"
+ r"^kubernetes_tests/.*|"
+ r".*Error during running tests.*|"
+ r".*Successfully run tests.*"
)
@@ -249,10 +249,10 @@ def _create_cluster(
set_random_cluster_ports(python=python, kubernetes_version=kubernetes_version, output=output)
result = run_command_with_k8s_env(
[
- 'kind',
- 'create',
- 'cluster',
- '--name',
+ "kind",
+ "create",
+ "cluster",
+ "--name",
cluster_name,
"--config",
str(get_kind_cluster_config_path(python=python, kubernetes_version=kubernetes_version)),
@@ -386,10 +386,10 @@ def _delete_cluster(
folder = get_config_folder(python=python, kubernetes_version=kubernetes_version)
run_command_with_k8s_env(
[
- 'kind',
- 'delete',
- 'cluster',
- '--name',
+ "kind",
+ "delete",
+ "cluster",
+ "--name",
cluster_name,
],
python=python,
@@ -463,7 +463,7 @@ def delete_cluster(python: str, kubernetes_version: str, all: bool, verbose: boo
def _get_python_kubernetes_version_from_name(cluster_name: str) -> tuple[str | None, str | None]:
- matcher = re.compile(r'airflow-python-(\d+\.\d+)-(v\d+.\d+.\d+)')
+ matcher = re.compile(r"airflow-python-(\d+\.\d+)-(v\d+.\d+.\d+)")
cluster_match = matcher.search(cluster_name)
if cluster_match:
python = cluster_match.group(1)
@@ -497,7 +497,7 @@ def _status(
get_console().print(f" * KINDCONFIG={kind_config_file}")
get_console().print(f"\n[info]Cluster info: {cluster_name}\n")
result = run_command_with_k8s_env(
- ['kubectl', 'cluster-info', '--cluster', kubectl_cluster_name],
+ ["kubectl", "cluster-info", "--cluster", kubectl_cluster_name],
python=python,
kubernetes_version=kubernetes_version,
dry_run=dry_run,
@@ -508,7 +508,7 @@ def _status(
return False
get_console().print(f"\n[info]Storage class for {cluster_name}\n")
result = run_command_with_k8s_env(
- ['kubectl', 'get', 'storageclass', '--cluster', kubectl_cluster_name],
+ ["kubectl", "get", "storageclass", "--cluster", kubectl_cluster_name],
python=python,
kubernetes_version=kubernetes_version,
dry_run=dry_run,
@@ -519,7 +519,7 @@ def _status(
return False
get_console().print(f"\n[info]Running pods for {cluster_name}\n")
result = run_command_with_k8s_env(
- ['kubectl', 'get', '-n', "kube-system", 'pods', '--cluster', kubectl_cluster_name],
+ ["kubectl", "get", "-n", "kube-system", "pods", "--cluster", kubectl_cluster_name],
python=python,
kubernetes_version=kubernetes_version,
dry_run=dry_run,
@@ -616,7 +616,7 @@ def _rebuild_k8s_image(
get_console(output=output).print(
f"[error]The base PROD image {params.airflow_image_name_with_tag} does not exist locally.\n"
)
- if image_tag == 'latest':
+ if image_tag == "latest":
get_console(output=output).print(
"[warning]Please add `--rebuild-base-image` flag or rebuild it manually with:\n"
)
@@ -642,7 +642,7 @@ ENV GUNICORN_CMD_ARGS='--preload' AIRFLOW__WEBSERVER__WORKER_REFRESH_INTERVAL=0
"""
image = f"{params.airflow_image_kubernetes}:latest"
docker_build_result = run_command(
- ['docker', 'build', "--tag", image, ".", "-f", "-"],
+ ["docker", "build", "--tag", image, ".", "-f", "-"],
input=docker_image_for_kubernetes_tests,
text=True,
check=False,
@@ -661,10 +661,10 @@ def _upload_k8s_image(
params = BuildProdParams(python=python)
cluster_name = get_kind_cluster_name(python=python, kubernetes_version=kubernetes_version)
get_console(output=output).print(
- f'[info]Uploading Airflow image {params.airflow_image_kubernetes} to cluster {cluster_name}'
+ f"[info]Uploading Airflow image {params.airflow_image_kubernetes} to cluster {cluster_name}"
)
kind_load_result = run_command_with_k8s_env(
- ['kind', 'load', "docker-image", "--name", cluster_name, params.airflow_image_kubernetes],
+ ["kind", "load", "docker-image", "--name", cluster_name, params.airflow_image_kubernetes],
python=python,
output=output,
kubernetes_version=kubernetes_version,
@@ -944,7 +944,7 @@ def _configure_k8s_cluster(
python: str, kubernetes_version: str, output: Output | None, verbose: bool, dry_run: bool
) -> tuple[int, str]:
cluster_name = get_kind_cluster_name(python=python, kubernetes_version=kubernetes_version)
- get_console(output=output).print(f'[info]Configuring {cluster_name} to be ready for Airflow deployment')
+ get_console(output=output).print(f"[info]Configuring {cluster_name} to be ready for Airflow deployment")
result = _recreate_namespaces(
python=python, kubernetes_version=kubernetes_version, output=output, dry_run=dry_run, verbose=verbose
)
@@ -1123,7 +1123,7 @@ def _deploy_airflow(
) -> tuple[int, str]:
action = "Deploying" if not upgrade else "Upgrading"
cluster_name = get_kind_cluster_name(python=python, kubernetes_version=kubernetes_version)
- get_console(output=output).print(f'[info]{action} Airflow for cluster {cluster_name}')
+ get_console(output=output).print(f"[info]{action} Airflow for cluster {cluster_name}")
result = _deploy_helm_chart(
python=python,
kubernetes_version=kubernetes_version,
@@ -1173,7 +1173,7 @@ def _deploy_airflow(
@option_kubernetes_versions
@option_verbose
@option_dry_run
-@click.argument('extra_options', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_options", nargs=-1, type=click.UNPROCESSED)
def deploy_airflow(
python: str,
kubernetes_version: str,
@@ -1265,37 +1265,37 @@ def deploy_airflow(
@option_kubernetes_version
@option_verbose
@option_dry_run
-@click.argument('k9s_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("k9s_args", nargs=-1, type=click.UNPROCESSED)
def k9s(python: str, kubernetes_version: str, verbose: bool, dry_run: bool, k9s_args: tuple[str, ...]):
result = create_virtualenv(force_venv_setup=False, verbose=verbose, dry_run=dry_run)
if result.returncode != 0:
sys.exit(result.returncode)
make_sure_kubernetes_tools_are_installed(verbose=verbose, dry_run=dry_run)
env = get_k8s_env(python=python, kubernetes_version=kubernetes_version)
- env['TERM'] = 'xterm-256color'
- editor = env.get('EDITOR')
+ env["TERM"] = "xterm-256color"
+ editor = env.get("EDITOR")
if not editor:
- env['EDITOR'] = 'vim'
- k9s_editor = env.get('K9S_EDITOR')
+ env["EDITOR"] = "vim"
+ k9s_editor = env.get("K9S_EDITOR")
if not k9s_editor:
- env['K9S_EDITOR'] = env['EDITOR']
+ env["K9S_EDITOR"] = env["EDITOR"]
kubeconfig_file = get_kubeconfig_file(python=python, kubernetes_version=kubernetes_version)
result = run_command(
[
- 'docker',
- 'run',
+ "docker",
+ "run",
"--rm",
- '-it',
- '--network',
- 'host',
- '-e',
- 'EDITOR',
- '-e',
- 'K9S_EDITOR',
- '-v',
+ "-it",
+ "--network",
+ "host",
+ "-e",
+ "EDITOR",
+ "-e",
+ "K9S_EDITOR",
+ "-v",
f"{kubeconfig_file}:/root/.kube/config",
- 'quay.io/derailed/k9s',
- '--namespace',
+ "quay.io/derailed/k9s",
+ "--namespace",
HELM_AIRFLOW_NAMESPACE,
*k9s_args,
],
@@ -1313,7 +1313,7 @@ def _logs(python: str, kubernetes_version: str, verbose: bool, dry_run: bool):
tmpdir = Path(tempfile.gettempdir()) / f"kind_logs_{cluster_name}"
get_console().print(f"[info]\nDumping logs for {cluster_name} to {tmpdir}:\n")
run_command_with_k8s_env(
- ['kind', '--name', cluster_name, 'export', 'logs', str(tmpdir)],
+ ["kind", "--name", cluster_name, "export", "logs", str(tmpdir)],
python=python,
kubernetes_version=kubernetes_version,
verbose=verbose,
@@ -1370,7 +1370,7 @@ def logs(python: str, kubernetes_version: str, all: bool, verbose: bool, dry_run
@option_force_venv_setup
@option_verbose
@option_dry_run
-@click.argument('shell_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("shell_args", nargs=-1, type=click.UNPROCESSED)
def shell(
python: str,
kubernetes_version: str,
@@ -1386,12 +1386,12 @@ def shell(
make_sure_kubernetes_tools_are_installed(verbose=verbose, dry_run=dry_run)
env = get_k8s_env(python=python, kubernetes_version=kubernetes_version, executor=executor)
get_console().print("\n[info]Entering interactive k8s shell.\n")
- shell_binary = env['SHELL']
+ shell_binary = env["SHELL"]
extra_args: list[str] = []
if shell_binary.endswith("zsh"):
- extra_args.append('--no-rcs')
+ extra_args.append("--no-rcs")
elif shell_binary.endswith("bash"):
- extra_args.extend(['--norc', '--noprofile'])
+ extra_args.extend(["--norc", "--noprofile"])
result = run_command(
[shell_binary, *extra_args, *shell_args], verbose=verbose, dry_run=dry_run, env=env, check=False
)
@@ -1424,20 +1424,20 @@ def _run_tests(
env = get_k8s_env(python=python, kubernetes_version=kubernetes_version, executor=executor)
kubectl_cluster_name = get_kubectl_cluster_name(python=python, kubernetes_version=kubernetes_version)
get_console(output=output).print(f"\n[info]Running tests with {kubectl_cluster_name} cluster.")
- shell_binary = env.get('SHELL', shutil.which('bash'))
+ shell_binary = env.get("SHELL", shutil.which("bash"))
extra_shell_args: list[str] = []
if shell_binary.endswith("zsh"):
- extra_shell_args.append('--no-rcs')
+ extra_shell_args.append("--no-rcs")
elif shell_binary.endswith("bash"):
- extra_shell_args.extend(['--norc', '--noprofile'])
+ extra_shell_args.extend(["--norc", "--noprofile"])
the_tests = []
if not any(arg.startswith("kubernetes_tests") for arg in test_args):
# if no tests specified - use args
- the_tests.append('kubernetes_tests')
- command_to_run = ' '.join([quote(arg) for arg in ['pytest', *the_tests, *test_args]])
+ the_tests.append("kubernetes_tests")
+ command_to_run = " ".join([quote(arg) for arg in ["pytest", *the_tests, *test_args]])
get_console(output).print(f"[info] Command to run:[/] {command_to_run}")
result = run_command(
- [shell_binary, *extra_shell_args, '-c', command_to_run],
+ [shell_binary, *extra_shell_args, "-c", command_to_run],
verbose=verbose,
dry_run=dry_run,
output=output,
@@ -1467,7 +1467,7 @@ def _run_tests(
@option_kubernetes_versions
@option_verbose
@option_dry_run
-@click.argument('test_args', nargs=-1, type=click.Path())
+@click.argument("test_args", nargs=-1, type=click.Path())
def tests(
python: str,
kubernetes_version: str,
@@ -1704,7 +1704,7 @@ def _run_complete_tests(
@option_kubernetes_versions
@option_verbose
@option_dry_run
-@click.argument('test_args', nargs=-1, type=click.Path())
+@click.argument("test_args", nargs=-1, type=click.Path())
def run_complete_tests(
python: str,
kubernetes_version: str,
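
`_run_tests` above builds a single shell-quoted command string before handing it to the shell via `-c`. The quoting pattern in isolation, using shlex.quote (the extra pytest args are hypothetical):

    from shlex import quote

    test_args = ["-k", "test dag processing"]  # hypothetical extra pytest args
    command_to_run = " ".join(quote(arg) for arg in ["pytest", "kubernetes_tests", *test_args])
    print(command_to_run)  # -> pytest kubernetes_tests -k 'test dag processing'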
diff --git a/dev/breeze/src/airflow_breeze/commands/main_command.py b/dev/breeze/src/airflow_breeze/commands/main_command.py
index 9246fcb860..972ed83f51 100644
--- a/dev/breeze/src/airflow_breeze/commands/main_command.py
+++ b/dev/breeze/src/airflow_breeze/commands/main_command.py
@@ -64,32 +64,32 @@ class MainGroupWithAliases(BreezeGroup):
rv = click.Group.get_command(self, ctx, cmd_name)
if rv is not None:
return rv
- if cmd_name == 'build-image':
- print_deprecated('build-image', 'ci-image build')
- return ci_image.get_command(ctx, 'build')
- if cmd_name == 'build-prod-image':
- print_deprecated('build-prod-image', 'prod-image build')
- return prod_image.get_command(ctx, 'build')
- if cmd_name == 'tests':
- print_deprecated('tests', 'testing tests')
- return testing.get_command(ctx, 'tests')
- if cmd_name == 'config':
- print_deprecated('config', 'setup config')
- return setup.get_command(ctx, 'config')
- if cmd_name == 'setup-autocomplete':
- print_deprecated('setup-autocomplete', 'setup autocomplete')
- return setup.get_command(ctx, 'autocomplete')
- if cmd_name == 'version':
+ if cmd_name == "build-image":
+ print_deprecated("build-image", "ci-image build")
+ return ci_image.get_command(ctx, "build")
+ if cmd_name == "build-prod-image":
+ print_deprecated("build-prod-image", "prod-image build")
+ return prod_image.get_command(ctx, "build")
+ if cmd_name == "tests":
+ print_deprecated("tests", "testing tests")
+ return testing.get_command(ctx, "tests")
+ if cmd_name == "config":
+ print_deprecated("config", "setup config")
+ return setup.get_command(ctx, "config")
+ if cmd_name == "setup-autocomplete":
+ print_deprecated("setup-autocomplete", "setup autocomplete")
+ return setup.get_command(ctx, "autocomplete")
+ if cmd_name == "version":
# version alias does not need to be deprecated. It's ok to keep it also at top level
# even if it is not displayed in help
- return setup.get_command(ctx, 'version')
+ return setup.get_command(ctx, "version")
return None
@click.group(
cls=MainGroupWithAliases,
invoke_without_command=True,
- context_settings={'help_option_names': ['-h', '--help']},
+ context_settings={"help_option_names": ["-h", "--help"]},
)
@option_python
@option_backend
@@ -123,15 +123,15 @@ def check_for_python_emulation():
from airflow_breeze.utils.console import get_console
get_console().print(
- f'\n\n[error]Your Python architecture is {python_machine} and '
- f'system architecture is {system_machine}[/]'
+ f"\n\n[error]Your Python architecture is {python_machine} and "
+ f"system architecture is {system_machine}[/]"
)
get_console().print(
- '[warning]This is very bad and your Python is 10x slower as it is emulated[/]'
+ "[warning]This is very bad and your Python is 10x slower as it is emulated[/]"
)
get_console().print(
- '[warning]You likely installed your Python wrongly and you should '
- 'remove it and reinstall from scratch[/]\n'
+ "[warning]You likely installed your Python wrongly and you should "
+ "remove it and reinstall from scratch[/]\n"
)
from inputimeout import inputimeout
@@ -139,7 +139,7 @@ def check_for_python_emulation():
prompt="Are you REALLY sure you want to continue? (press y otherwise we exit in 20s) ",
timeout=20,
)
- if not user_status.upper() in ['Y', 'YES']:
+ if not user_status.upper() in ["Y", "YES"]:
sys.exit(1)
except subprocess.CalledProcessError:
pass
@@ -148,7 +148,7 @@ def check_for_python_emulation():
def check_for_rosetta_environment():
- if sys.platform != 'darwin':
+ if sys.platform != "darwin":
return
try:
runs_in_rosetta = subprocess.check_output(
@@ -156,25 +156,25 @@ def check_for_rosetta_environment():
text=True,
stderr=subprocess.DEVNULL,
).strip()
- if runs_in_rosetta == '1':
+ if runs_in_rosetta == "1":
from airflow_breeze.utils.console import get_console
get_console().print(
- '\n\n[error]You are starting breeze in `rosetta 2` emulated environment on Mac[/]\n'
+ "\n\n[error]You are starting breeze in `rosetta 2` emulated environment on Mac[/]\n"
)
get_console().print(
- '[warning]This is very bad and your Python is 10x slower as it is emulated[/]\n'
+ "[warning]This is very bad and your Python is 10x slower as it is emulated[/]\n"
)
get_console().print(
- 'You have emulated Python interpreter (Intel rather than ARM). You should check:\n\n'
+ "You have emulated Python interpreter (Intel rather than ARM). You should check:\n\n"
' * Your IDE (PyCharm/VSCode/Intellij): the "About" window should show `aarch64` '
'not `x86_64` in "Runtime version".\n'
' * Your python: run "python -c '
'import platform; print(platform.uname().machine)"). '
- 'It should show `arm64` not `x86_64`.\n'
+ "It should show `arm64` not `x86_64`.\n"
' * Your `brew`: run "brew config" and it should show `arm` in the CPU line, not `x86`.\n\n'
- 'If you have mixed Intel/ARM binaries installed you should likely nuke and '
- 'reinstall your development environment (including brew and Python) from scratch!\n\n'
+ "If you have mixed Intel/ARM binaries installed you should likely nuke and "
+ "reinstall your development environment (including brew and Python) from scratch!\n\n"
)
from inputimeout import inputimeout
@@ -182,7 +182,7 @@ def check_for_rosetta_environment():
prompt="Are you REALLY sure you want to continue? (press y otherwise we exit in 20s) ",
timeout=20,
)
- if not user_status.upper() in ['Y', 'YES']:
+ if user_status.upper() not in ["Y", "YES"]:
sys.exit(1)
except subprocess.CalledProcessError:
pass
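The exact subprocess invocation for the Rosetta check sits outside this hunk, but a common mechanism on macOS (an assumption here, not a claim about Breeze's precise command) is the sysctl.proc_translated flag:

import subprocess

def running_under_rosetta() -> bool:
    # macOS reports 1 here when the process is translated by Rosetta 2;
    # the key does not exist on Intel Macs, hence check=False.
    result = subprocess.run(
        ["sysctl", "-n", "sysctl.proc_translated"],
        capture_output=True, text=True, check=False,
    )
    return result.stdout.strip() == "1"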
@@ -195,9 +195,9 @@ def check_for_rosetta_environment():
help="Cleans the cache of parameters, docker cache and optionally built CI/PROD images.",
)
@click.option(
- '--all',
+ "--all",
is_flag=True,
- help='Also remove currently downloaded Breeze images.',
+ help="Also remove currently downloaded Breeze images.",
)
@option_verbose
@option_answer
@@ -213,12 +213,12 @@ def cleanup(verbose: bool, dry_run: bool, github_repository: str, all: bool, ans
get_console().print("[info]Removing cache of parameters and cleaning up docker cache[/]")
if all:
docker_images_command_to_execute = [
- 'docker',
- 'images',
- '--filter',
- 'label=org.apache.airflow.image',
- '--format',
- '{{.Repository}}:{{.Tag}}',
+ "docker",
+ "images",
+ "--filter",
+ "label=org.apache.airflow.image",
+ "--format",
+ "{{.Repository}}:{{.Tag}}",
]
command_result = run_command(
docker_images_command_to_execute, verbose=verbose, text=True, capture_output=True
@@ -230,9 +230,9 @@ def cleanup(verbose: bool, dry_run: bool, github_repository: str, all: bool, ans
get_console().print(f"[info] * {image}[/]")
get_console().print()
docker_rmi_command_to_execute = [
- 'docker',
- 'rmi',
- '--force',
+ "docker",
+ "rmi",
+ "--force",
]
docker_rmi_command_to_execute.extend(images)
given_answer = user_confirm("Are you sure you want to proceed with the removal?")
@@ -245,7 +245,7 @@ def cleanup(verbose: bool, dry_run: bool, github_repository: str, all: bool, ans
get_console().print("Pruning docker images")
given_answer = user_confirm("Are you sure you want to proceed with the removal?")
if given_answer == Answer.YES:
- system_prune_command_to_execute = ['docker', 'system', 'prune']
+ system_prune_command_to_execute = ["docker", "system", "prune"]
run_command(
system_prune_command_to_execute,
verbose=verbose,
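Put together, the two argv lists assembled by cleanup amount to this pipeline; a runnable sketch that lists instead of removing (assumes a local Docker daemon):

import subprocess

# List images carrying the Airflow label, one "repo:tag" per line,
# using the same docker invocation the cleanup command builds above.
output = subprocess.check_output(
    ["docker", "images",
     "--filter", "label=org.apache.airflow.image",
     "--format", "{{.Repository}}:{{.Tag}}"],
    text=True,
)
for image in (line for line in output.splitlines() if line):
    print(f"would remove: {image}")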
diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
index 32e7de2dd1..cdddc32cf6 100644
--- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
@@ -145,7 +145,7 @@ def start_building(prod_image_params: BuildProdParams, dry_run: bool, verbose: b
@click.group(
- cls=BreezeGroup, name='prod-image', help="Tools that developers can use to manually manage PROD images"
+ cls=BreezeGroup, name="prod-image", help="Tools that developers can use to manually manage PROD images"
)
def prod_image():
pass
@@ -154,7 +154,7 @@ def prod_image():
@option_verbose
@option_dry_run
@option_answer
-@prod_image.command(name='build')
+@prod_image.command(name="build")
@option_python
@option_run_in_parallel
@option_parallelism
@@ -175,43 +175,43 @@ def prod_image():
@option_empty_image
@option_airflow_constraints_mode_prod
@click.option(
- '--installation-method',
+ "--installation-method",
help="Install Airflow from: sources or PyPI.",
type=BetterChoice(ALLOWED_INSTALLATION_METHODS),
)
@option_install_providers_from_sources
@click.option(
- '--install-packages-from-context',
- help='Install wheels from local docker-context-files when building image. '
- 'Implies --disable-airflow-repo-cache.',
+ "--install-packages-from-context",
+ help="Install wheels from local docker-context-files when building the image. "
+ "Implies --disable-airflow-repo-cache.",
is_flag=True,
)
@click.option(
- '--cleanup-context',
- help='Clean up docker context files before running build (cannot be used together'
- ' with --install-packages-from-context).',
+ "--cleanup-context",
+ help="Clean up docker context files before running build (cannot be used together"
+ " with --install-packages-from-context).",
is_flag=True,
)
@click.option(
- '--airflow-extras',
+ "--airflow-extras",
default=",".join(DEFAULT_EXTRAS),
show_default=True,
help="Extras to install by default.",
)
-@click.option('--disable-mysql-client-installation', help="Do not install MySQL client.", is_flag=True)
-@click.option('--disable-mssql-client-installation', help="Do not install MsSQl client.", is_flag=True)
-@click.option('--disable-postgres-client-installation', help="Do not install Postgres client.", is_flag=True)
+@click.option("--disable-mysql-client-installation", help="Do not install MySQL client.", is_flag=True)
+@click.option("--disable-mssql-client-installation", help="Do not install MsSQL client.", is_flag=True)
+@click.option("--disable-postgres-client-installation", help="Do not install Postgres client.", is_flag=True)
@click.option(
- '--disable-airflow-repo-cache',
+ "--disable-airflow-repo-cache",
help="Disable cache from Airflow repository during building.",
is_flag=True,
)
@click.option(
- '--install-airflow-reference',
+ "--install-airflow-reference",
help="Install Airflow using GitHub tag or branch.",
)
@option_airflow_constraints_reference_build
-@click.option('-V', '--install-airflow-version', help="Install version of Airflow from PyPI.")
+@click.option("-V", "--install-airflow-version", help="Install version of Airflow from PyPI.")
@option_additional_extras
@option_additional_dev_apt_deps
@option_additional_runtime_apt_deps
@@ -281,7 +281,7 @@ def build(
run_build(prod_image_params=params)
-@prod_image.command(name='pull')
+@prod_image.command(name="pull")
@option_verbose
@option_dry_run
@option_python
@@ -297,7 +297,7 @@ def build(
@option_wait_for_image
@option_tag_as_latest
@option_verify
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def pull_prod_image(
verbose: bool,
dry_run: bool,
@@ -362,7 +362,7 @@ def pull_prod_image(
@prod_image.command(
- name='verify',
+ name="verify",
context_settings=dict(
ignore_unknown_options=True,
allow_extra_args=True,
@@ -376,11 +376,11 @@ def pull_prod_image(
@option_image_name
@option_pull
@click.option(
- '--slim-image',
- help='The image to verify is slim and non-slim tests should be skipped.',
+ "--slim-image",
+ help="The image to verify is slim and non-slim tests should be skipped.",
is_flag=True,
)
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def verify(
verbose: bool,
dry_run: bool,
@@ -408,7 +408,7 @@ def verify(
output=None,
verbose=verbose,
dry_run=dry_run,
- image_type='PROD',
+ image_type="PROD",
extra_pytest_args=extra_pytest_args,
slim_image=slim_image,
)
@@ -424,9 +424,9 @@ def clean_docker_context_files(verbose: bool, dry_run: bool):
if dry_run:
return
with contextlib.suppress(FileNotFoundError):
- context_files_to_delete = DOCKER_CONTEXT_DIR.glob('**/*')
+ context_files_to_delete = DOCKER_CONTEXT_DIR.glob("**/*")
for file_to_delete in context_files_to_delete:
- if file_to_delete.name != '.README.md':
+ if file_to_delete.name != ".README.md":
file_to_delete.unlink()
@@ -439,25 +439,25 @@ def check_docker_context_files(install_packages_from_context: bool):
:param install_packages_from_context: whether we want to install from docker-context-files
"""
- context_file = DOCKER_CONTEXT_DIR.glob('**/*')
+ context_file = DOCKER_CONTEXT_DIR.glob("**/*")
number_of_context_files = len(
- [context for context in context_file if context.is_file() and context.name != '.README.md']
+ [context for context in context_file if context.is_file() and context.name != ".README.md"]
)
if number_of_context_files == 0:
if install_packages_from_context:
- get_console().print('[warning]\nERROR! You want to install packages from docker-context-files')
- get_console().print('[warning]\n but there are no packages to install in this folder.')
+ get_console().print("[warning]\nERROR! You want to install packages from docker-context-files")
+ get_console().print("[warning]\n but there are no packages to install in this folder.")
sys.exit(1)
else:
if not install_packages_from_context:
get_console().print(
- '[warning]\n ERROR! There are some extra files in docker-context-files except README.md'
+ "[warning]\n ERROR! There are some extra files in docker-context-files except README.md"
)
- get_console().print('[warning]\nAnd you did not choose --install-packages-from-context flag')
+ get_console().print("[warning]\nAnd you did not choose the --install-packages-from-context flag")
get_console().print(
- '[warning]\nThis might result in unnecessary cache invalidation and long build times'
+ "[warning]\nThis might result in unnecessary cache invalidation and long build times"
)
+ get_console().print("[warning]Please restart the command with the --cleanup-context switch\n")
+ get_console().print("[warning]Please restart the command with --cleanup-context switch\n")
sys.exit(1)
@@ -505,7 +505,7 @@ def run_build_production_image(
else:
if prod_image_params.empty_image:
env = os.environ.copy()
- env['DOCKER_BUILDKIT'] = "1"
+ env["DOCKER_BUILDKIT"] = "1"
get_console(output=output).print(
f"\n[info]Building empty PROD Image for Python {prod_image_params.python}\n"
)
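DOCKER_BUILDKIT=1 is Docker's standard switch for the BuildKit builder, and copying os.environ scopes the override to the one build subprocess. The same pattern in isolation (image tag and build context are illustrative):

import os
import subprocess

env = os.environ.copy()
env["DOCKER_BUILDKIT"] = "1"  # enable BuildKit for this one invocation only
subprocess.run(["docker", "build", "--tag", "example:latest", "."], env=env, check=True)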
diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index acfd485016..9081987ef7 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -81,7 +81,7 @@ option_debug_release_management = click.option(
"--debug",
is_flag=True,
help="Drop the user into a shell instead of running the command. Useful for debugging.",
- envvar='DEBUG',
+ envvar="DEBUG",
)
@@ -110,7 +110,7 @@ def run_with_debug(
params.airflow_image_name_with_tag,
]
if debug:
- cmd_string = ' '.join([shlex.quote(s) for s in command if s != "-c"])
+ cmd_string = " ".join([shlex.quote(s) for s in command if s != "-c"])
base_command.extend(
[
"-c",
@@ -144,7 +144,7 @@ echo -e '\\e[34mRun this command to debug:
@click.group(
cls=BreezeGroup,
- name='release-management',
+ name="release-management",
help="Tools that release managers can use to prepare and manage Airflow releases",
)
def release_management():
@@ -152,7 +152,7 @@ def release_management():
@release_management.command(
- name='prepare-airflow-package',
+ name="prepare-airflow-package",
help="Prepare sdist/whl package of Airflow.",
)
@option_verbose
@@ -194,7 +194,7 @@ def prepare_airflow_packages(
@release_management.command(
- name='prepare-provider-documentation',
+ name="prepare-provider-documentation",
help="Prepare CHANGELOG, README and COMMITS information for providers.",
)
@option_verbose
@@ -225,7 +225,7 @@ def prepare_provider_documentation(
result_command = run_with_debug(
params=shell_params,
command=cmd_to_run,
- enable_input=not answer or answer.lower() not in ['y', 'yes'],
+ enable_input=not answer or answer.lower() not in ["y", "yes"],
verbose=verbose,
dry_run=dry_run,
debug=debug,
@@ -234,7 +234,7 @@ def prepare_provider_documentation(
@release_management.command(
- name='prepare-provider-packages',
+ name="prepare-provider-packages",
help="Prepare sdist/whl packages of Airflow Providers.",
)
@option_verbose
@@ -243,9 +243,9 @@ def prepare_provider_documentation(
@option_package_format
@option_version_suffix_for_pypi
@click.option(
- '--package-list-file',
- type=click.File('rt'),
- help='Read list of packages from text file (one package per line).',
+ "--package-list-file",
+ type=click.File("rt"),
+ help="Read list of packages from text file (one package per line).",
)
@option_debug_release_management
@argument_packages
@@ -309,7 +309,7 @@ def run_generate_constraints(
CONSTRAINT_PROGRESS_MATCHER = (
- r'Found|Uninstalling|uninstalled|Collecting|Downloading|eta|Running|Installing|built|Attempting'
+ r"Found|Uninstalling|uninstalled|Collecting|Downloading|eta|Running|Installing|built|Attempting"
)
@@ -360,7 +360,7 @@ def run_generate_constraints_in_parallel(
@release_management.command(
- name='generate-constraints',
+ name="generate-constraints",
help="Generates pinned constraint files with all extras from setup.py in parallel.",
)
@option_verbose
@@ -464,7 +464,7 @@ def generate_constraints(
@release_management.command(
- name='verify-provider-packages',
+ name="verify-provider-packages",
help="Verifies if all provider code is following expectations for providers.",
)
@option_use_airflow_version
@@ -474,7 +474,7 @@ def generate_constraints(
"--skip-constraints",
is_flag=True,
help="Do not use constraints when installing providers.",
- envvar='SKIP_CONSTRAINTS',
+ envvar="SKIP_CONSTRAINTS",
)
@option_use_packages_from_dist
@option_installation_package_format
@@ -526,7 +526,7 @@ def convert_build_args_dict_to_array_of_args(build_args: dict[str, str]) -> list
array_of_args = []
for key, value in build_args.items():
array_of_args.append("--build-arg")
- array_of_args.append(f'{key}={value}')
+ array_of_args.append(f"{key}={value}")
return array_of_args
@@ -542,33 +542,33 @@ def alias_image(image_from: str, image_to: str, dry_run: bool, verbose: bool):
@release_management.command(
name="release-prod-images", help="Release production images to DockerHub (needs DockerHub permissions)."
)
-@click.option('--airflow-version', required=True, help="Airflow version to release (2.3.0, 2.3.0rc1 etc.)")
+@click.option("--airflow-version", required=True, help="Airflow version to release (2.3.0, 2.3.0rc1 etc.)")
@click.option(
- '--dockerhub-repo',
+ "--dockerhub-repo",
default=APACHE_AIRFLOW_GITHUB_REPOSITORY,
show_default=True,
help="DockerHub repository for the images",
)
@click.option(
- '--slim-images',
+ "--slim-images",
is_flag=True,
- help='Whether to prepare slim images instead of the regular ones.',
+ help="Whether to prepare slim images instead of the regular ones.",
)
@click.option(
- '--limit-python',
+ "--limit-python",
type=BetterChoice(CURRENT_PYTHON_MAJOR_MINOR_VERSIONS),
help="Specific python to build slim images for (if not specified - the images are built for all"
" available python versions)",
)
@click.option(
- '--limit-platform',
+ "--limit-platform",
type=BetterChoice(ALLOWED_PLATFORMS),
default=MULTI_PLATFORM,
show_default=True,
help="Specific platform to build images for (if not specified, multiplatform images will be built).",
)
@click.option(
- '--skip-latest',
+ "--skip-latest",
is_flag=True,
help="Whether to skip publishing the latest images (so that 'latest' images are not updated). "
"This should only be used if you release images for previous branches. Automatically set when "
@@ -605,7 +605,7 @@ def release_prod_images(
"[info]Also tagging the images with latest tags as this is release version.[/]"
)
result_docker_buildx = run_command(
- ["docker", 'buildx', 'version'], check=False, dry_run=dry_run, verbose=verbose
+ ["docker", "buildx", "version"], check=False, dry_run=dry_run, verbose=verbose
)
if result_docker_buildx.returncode != 0:
get_console().print("[error]Docker buildx plugin must be installed to release the images[/]")
@@ -613,7 +613,7 @@ def release_prod_images(
get_console().print("See https://docs.docker.com/buildx/working-with-buildx/ for installation info.")
sys.exit(1)
result_inspect_builder = run_command(
- ["docker", 'buildx', 'inspect', 'airflow_cache'], check=False, dry_run=dry_run, verbose=verbose
+ ["docker", "buildx", "inspect", "airflow_cache"], check=False, dry_run=dry_run, verbose=verbose
)
if result_inspect_builder.returncode != 0:
get_console().print("[error]Airflow Cache builder must be configured to release the images[/]")
@@ -623,7 +623,7 @@ def release_prod_images(
" for instructions on setting it up."
)
sys.exit(1)
- result_regctl = run_command(["regctl", 'version'], check=False, dry_run=dry_run, verbose=verbose)
+ result_regctl = run_command(["regctl", "version"], check=False, dry_run=dry_run, verbose=verbose)
if result_regctl.returncode != 0:
get_console().print("[error]Regctl must be installed and on PATH to release the images[/]")
get_console().print()
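The three probes above share one preflight pattern: run a version command for each required CLI and exit with installation pointers on a non-zero return code. A condensed sketch of that pattern (tool list illustrative):

import shutil
import subprocess
import sys

for probe in (["docker", "buildx", "version"], ["regctl", "version"]):
    # Fail fast if the tool is missing from PATH or its probe command fails.
    if shutil.which(probe[0]) is None or subprocess.run(
        probe, check=False, capture_output=True
    ).returncode != 0:
        print(f"{probe[0]} must be installed and working to release the images")
        sys.exit(1)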
diff --git a/dev/breeze/src/airflow_breeze/commands/setup_commands.py b/dev/breeze/src/airflow_breeze/commands/setup_commands.py
index eca16ecd12..a7b00c6eaf 100644
--- a/dev/breeze/src/airflow_breeze/commands/setup_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/setup_commands.py
@@ -60,20 +60,20 @@ from airflow_breeze.utils.run_utils import run_command
from airflow_breeze.utils.visuals import ASCIIART, ASCIIART_STYLE
-@click.group(cls=BreezeGroup, name='setup', help='Tools that developers can use to configure Breeze')
+@click.group(cls=BreezeGroup, name="setup", help="Tools that developers can use to configure Breeze")
def setup():
pass
@click.option(
- '-a',
- '--use-current-airflow-sources',
+ "-a",
+ "--use-current-airflow-sources",
is_flag=True,
- help='Use current workdir Airflow sources for upgrade'
+ help="Use current workdir Airflow sources for upgrade"
+ (f" rather than {get_installation_airflow_sources()}." if not generating_command_images() else "."),
)
@setup.command(
- name='self-upgrade',
+ name="self-upgrade",
help="Self-upgrade Breeze. By default it re-installs Breeze "
f"from {get_installation_airflow_sources()}."
if not generating_command_images()
@@ -95,21 +95,21 @@ def self_upgrade(use_current_airflow_sources: bool):
@option_verbose
@option_dry_run
@click.option(
- '-f',
- '--force',
+ "-f",
+ "--force",
is_flag=True,
- help='Force autocomplete setup even if already setup before (overrides the setup).',
+ help="Force autocomplete setup even if it was already set up before (overrides the setup).",
)
@option_answer
-@setup.command(name='autocomplete')
+@setup.command(name="autocomplete")
def autocomplete(verbose: bool, dry_run: bool, force: bool, answer: str | None):
"""
Enables autocompletion of breeze commands.
"""
# Determine if the shell is bash/zsh/fish. It helps to build the autocomplete path
- detected_shell = os.environ.get('SHELL')
+ detected_shell = os.environ.get("SHELL")
detected_shell = None if detected_shell is None else detected_shell.split(os.sep)[-1]
- if detected_shell not in ['bash', 'zsh', 'fish']:
+ if detected_shell not in ["bash", "zsh", "fish"]:
get_console().print(f"\n[error] The shell {detected_shell} is not supported for autocomplete![/]\n")
sys.exit(1)
get_console().print(f"Installing {detected_shell} completion for local user")
@@ -122,31 +122,31 @@ def autocomplete(verbose: bool, dry_run: bool, force: bool, answer: str | None):
"Should we proceed with modifying the script?", default_answer=Answer.NO, timeout=STANDARD_TIMEOUT
)
if given_answer == Answer.YES:
- if detected_shell == 'bash':
- script_path = str(Path('~').expanduser() / '.bash_completion')
+ if detected_shell == "bash":
+ script_path = str(Path("~").expanduser() / ".bash_completion")
command_to_execute = f"source {autocomplete_path}"
write_to_shell(command_to_execute, dry_run, script_path, force)
- elif detected_shell == 'zsh':
- script_path = str(Path('~').expanduser() / '.zshrc')
+ elif detected_shell == "zsh":
+ script_path = str(Path("~").expanduser() / ".zshrc")
command_to_execute = f"source {autocomplete_path}"
write_to_shell(command_to_execute, dry_run, script_path, force)
- elif detected_shell == 'fish':
+ elif detected_shell == "fish":
# Include steps for fish shell
- script_path = str(Path('~').expanduser() / f'.config/fish/completions/{NAME}.fish')
+ script_path = str(Path("~").expanduser() / f".config/fish/completions/{NAME}.fish")
if os.path.exists(script_path) and not force:
get_console().print(
"\n[warning]Autocompletion is already set up. Skipping. "
"You can force autocomplete installation by adding --force[/]\n"
)
else:
- with open(autocomplete_path) as source_file, open(script_path, 'w') as destination_file:
+ with open(autocomplete_path) as source_file, open(script_path, "w") as destination_file:
for line in source_file:
destination_file.write(line)
else:
# Include steps for powershell
- subprocess.check_call(['powershell', 'Set-ExecutionPolicy Unrestricted -Scope CurrentUser'])
+ subprocess.check_call(["powershell", "Set-ExecutionPolicy Unrestricted -Scope CurrentUser"])
script_path = (
- subprocess.check_output(['powershell', '-NoProfile', 'echo $profile']).decode("utf-8").strip()
+ subprocess.check_output(["powershell", "-NoProfile", "echo $profile"]).decode("utf-8").strip()
)
command_to_execute = f". {autocomplete_path}"
write_to_shell(command_to_execute, dry_run, script_path, force)
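The shell detection boils down to taking the basename of $SHELL and mapping it to the file that should source the completion script. A compact sketch of that mapping, using the same rc-file paths as the hunk above:

import os
from pathlib import Path

RC_FILES = {
    "bash": Path("~/.bash_completion").expanduser(),
    "zsh": Path("~/.zshrc").expanduser(),
}
# Basename of $SHELL, e.g. "/bin/zsh" -> "zsh".
shell = (os.environ.get("SHELL") or "").split(os.sep)[-1]
print(f"detected shell: {shell!r} -> rc file: {RC_FILES.get(shell)}")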
@@ -181,16 +181,16 @@ def version(verbose: bool):
)
-@setup.command(name='config')
+@setup.command(name="config")
@option_python
@option_backend
@option_postgres_version
@option_mysql_version
@option_mssql_version
-@click.option('-C/-c', '--cheatsheet/--no-cheatsheet', help="Enable/disable cheatsheet.", default=None)
-@click.option('-A/-a', '--asciiart/--no-asciiart', help="Enable/disable ASCIIart.", default=None)
+@click.option("-C/-c", "--cheatsheet/--no-cheatsheet", help="Enable/disable cheatsheet.", default=None)
+@click.option("-A/-a", "--asciiart/--no-asciiart", help="Enable/disable ASCIIart.", default=None)
@click.option(
- '--colour/--no-colour',
+ "--colour/--no-colour",
help="Enable/disable Colour mode (useful for colour blind-friendly communication).",
default=None,
)
@@ -214,24 +214,24 @@ def change_config(
if asciiart is not None:
if asciiart:
delete_cache(asciiart_file)
- get_console().print('[info]Enable ASCIIART![/]')
+ get_console().print("[info]Enable ASCIIART![/]")
else:
touch_cache_file(asciiart_file)
- get_console().print('[info]Disable ASCIIART![/]')
+ get_console().print("[info]Disable ASCIIART![/]")
if cheatsheet is not None:
if cheatsheet:
delete_cache(cheatsheet_file)
- get_console().print('[info]Enable Cheatsheet[/]')
+ get_console().print("[info]Enable Cheatsheet[/]")
elif cheatsheet is not None:
touch_cache_file(cheatsheet_file)
- get_console().print('[info]Disable Cheatsheet[/]')
+ get_console().print("[info]Disable Cheatsheet[/]")
if colour is not None:
if colour:
delete_cache(colour_file)
- get_console().print('[info]Enable Colour[/]')
+ get_console().print("[info]Enable Colour[/]")
elif colour is not None:
touch_cache_file(colour_file)
- get_console().print('[info]Disable Colour[/]')
+ get_console().print("[info]Disable Colour[/]")
def get_status(file: str):
return "disabled" if check_if_cache_exists(file) else "enabled"
@@ -270,11 +270,11 @@ def get_command_hash_export(verbose: bool) -> str:
if verbose:
get_stderr_console().print(the_context_dict)
hashes.append(f"main:{dict_hash(the_context_dict['command']['params'])}")
- commands_dict = the_context_dict['command']['commands']
+ commands_dict = the_context_dict["command"]["commands"]
for command in sorted(commands_dict.keys()):
current_command_dict = commands_dict[command]
- if 'commands' in current_command_dict:
- subcommands = current_command_dict['commands']
+ if "commands" in current_command_dict:
+ subcommands = current_command_dict["commands"]
for subcommand in sorted(subcommands.keys()):
hashes.append(f"{command}:{subcommand}:{dict_hash(subcommands[subcommand])}")
hashes.append(f"{command}:{dict_hash(current_command_dict)}")
@@ -299,7 +299,7 @@ def write_to_shell(command_to_execute: str, dry_run: bool, script_path: str, for
else:
backup(script_path_file)
remove_autogenerated_code(script_path)
- text = ''
+ text = ""
if script_path_file.exists():
get_console().print(f"\nModifying the {script_path} file!\n")
get_console().print(f"\nCopy of the original file is held in {script_path}.bak !\n")
@@ -362,9 +362,9 @@ def get_commands() -> list[str]:
content = COMMAND_HASH_FILE_PATH.read_text()
for line in content.splitlines():
strip_line = line.strip()
- if strip_line == '' or strip_line.startswith("#"):
+ if strip_line == "" or strip_line.startswith("#"):
continue
- results.append(':'.join(strip_line.split(":")[:-1]))
+ results.append(":".join(strip_line.split(":")[:-1]))
return results
@@ -380,9 +380,9 @@ def get_command_hash_dict(hash_file_content: str) -> dict[str, str]:
results = {}
for line in hash_file_content.splitlines():
strip_line = line.strip()
- if strip_line.strip() == '' or strip_line.startswith("#"):
+ if strip_line == "" or strip_line.startswith("#"):
continue
- command = ':'.join(strip_line.split(":")[:-1])
+ command = ":".join(strip_line.split(":")[:-1])
the_hash = strip_line.split(":")[-1]
results[command] = the_hash
return results
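Both hash-file helpers split command:hash lines on the last colon so subcommand names keep their internal colons; str.rpartition expresses the same in one step (equivalent sketch, sample input invented):

from __future__ import annotations

def parse_hash_lines(content: str) -> dict[str, str]:
    results: dict[str, str] = {}
    for line in content.splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        command, _, the_hash = line.rpartition(":")  # split on the LAST colon
        results[command] = the_hash
    return results

print(parse_hash_lines("main:abc123\nci-image:build:def456"))
# {'main': 'abc123', 'ci-image:build': 'def456'}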
@@ -404,12 +404,12 @@ def regenerate_help_images_for_all_commands(
console.print("[error]The --check-only flag cannot be used with --command flag.")
return 2
env = os.environ.copy()
- env['AIRFLOW_SOURCES_ROOT'] = str(AIRFLOW_SOURCES_ROOT)
- env['RECORD_BREEZE_WIDTH'] = SCREENSHOT_WIDTH
- env['RECORD_BREEZE_TITLE'] = "Breeze commands"
- env['RECORD_BREEZE_OUTPUT_FILE'] = str(BREEZE_IMAGES_DIR / "output-commands.svg")
- env['TERM'] = "xterm-256color"
- env['PYTHONPATH'] = str(BREEZE_SOURCES_DIR)
+ env["AIRFLOW_SOURCES_ROOT"] = str(AIRFLOW_SOURCES_ROOT)
+ env["RECORD_BREEZE_WIDTH"] = SCREENSHOT_WIDTH
+ env["RECORD_BREEZE_TITLE"] = "Breeze commands"
+ env["RECORD_BREEZE_OUTPUT_FILE"] = str(BREEZE_IMAGES_DIR / "output-commands.svg")
+ env["TERM"] = "xterm-256color"
+ env["PYTHONPATH"] = str(BREEZE_SOURCES_DIR)
new_hash_text_dump = PREAMBLE + get_command_hash_export(verbose=verbose)
regenerate_all_commands = False
commands_list = list(commands)
@@ -451,29 +451,29 @@ def regenerate_help_images_for_all_commands(
regenerate_all_commands = True
if regenerate_all_commands:
env = os.environ.copy()
- env['AIRFLOW_SOURCES_ROOT'] = str(AIRFLOW_SOURCES_ROOT)
- env['RECORD_BREEZE_WIDTH'] = SCREENSHOT_WIDTH
- env['RECORD_BREEZE_TITLE'] = "Breeze commands"
- env['RECORD_BREEZE_OUTPUT_FILE'] = str(BREEZE_IMAGES_DIR / "output-commands.svg")
- env['TERM'] = "xterm-256color"
+ env["AIRFLOW_SOURCES_ROOT"] = str(AIRFLOW_SOURCES_ROOT)
+ env["RECORD_BREEZE_WIDTH"] = SCREENSHOT_WIDTH
+ env["RECORD_BREEZE_TITLE"] = "Breeze commands"
+ env["RECORD_BREEZE_OUTPUT_FILE"] = str(BREEZE_IMAGES_DIR / "output-commands.svg")
+ env["TERM"] = "xterm-256color"
run_command(
- ['breeze', "--help"],
+ ["breeze", "--help"],
env=env,
verbose=verbose,
dry_run=dry_run,
)
for command in commands_list:
- if command == 'main':
+ if command == "main":
continue
if ":" not in command:
env = os.environ.copy()
- env['AIRFLOW_SOURCES_ROOT'] = str(AIRFLOW_SOURCES_ROOT)
- env['RECORD_BREEZE_WIDTH'] = SCREENSHOT_WIDTH
- env['RECORD_BREEZE_TITLE'] = f"Command: {command}"
- env['RECORD_BREEZE_OUTPUT_FILE'] = str(BREEZE_IMAGES_DIR / f"output_{command}.svg")
- env['TERM'] = "xterm-256color"
+ env["AIRFLOW_SOURCES_ROOT"] = str(AIRFLOW_SOURCES_ROOT)
+ env["RECORD_BREEZE_WIDTH"] = SCREENSHOT_WIDTH
+ env["RECORD_BREEZE_TITLE"] = f"Command: {command}"
+ env["RECORD_BREEZE_OUTPUT_FILE"] = str(BREEZE_IMAGES_DIR / f"output_{command}.svg")
+ env["TERM"] = "xterm-256color"
run_command(
- ['breeze', command, "--help"],
+ ["breeze", command, "--help"],
env=env,
verbose=verbose,
dry_run=dry_run,
@@ -481,13 +481,13 @@ def regenerate_help_images_for_all_commands(
else:
split_command = command.split(":")
env = os.environ.copy()
- env['AIRFLOW_SOURCES_ROOT'] = str(AIRFLOW_SOURCES_ROOT)
- env['RECORD_BREEZE_WIDTH'] = SCREENSHOT_WIDTH
- env['RECORD_BREEZE_TITLE'] = f"Command: {split_command[0]} {split_command[1]}"
- env['RECORD_BREEZE_OUTPUT_FILE'] = str(
+ env["AIRFLOW_SOURCES_ROOT"] = str(AIRFLOW_SOURCES_ROOT)
+ env["RECORD_BREEZE_WIDTH"] = SCREENSHOT_WIDTH
+ env["RECORD_BREEZE_TITLE"] = f"Command: {split_command[0]} {split_command[1]}"
+ env["RECORD_BREEZE_OUTPUT_FILE"] = str(
BREEZE_IMAGES_DIR / f"output_{split_command[0]}_{split_command[1]}.svg"
)
- env['TERM'] = "xterm-256color"
+ env["TERM"] = "xterm-256color"
run_command(
[
"breeze",
@@ -506,16 +506,16 @@ def regenerate_help_images_for_all_commands(
@setup.command(name="regenerate-command-images", help="Regenerate breeze command images.")
-@click.option("--force", is_flag=True, help="Forces regeneration of all images", envvar='FORCE')
+@click.option("--force", is_flag=True, help="Forces regeneration of all images", envvar="FORCE")
@click.option(
"--check-only",
is_flag=True,
help="Only check if some images need to be regenerated. Return 0 if none do, or 1 if regeneration is needed. "
"Cannot be used together with --command flag or --force.",
- envvar='CHECK_ONLY',
+ envvar="CHECK_ONLY",
)
@click.option(
- '--command',
+ "--command",
help="Command(s) to regenerate images for (optional, might be repeated)",
show_default=True,
multiple=True,
diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
index 2f205e915b..3d8e962180 100644
--- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
@@ -72,13 +72,13 @@ from airflow_breeze.utils.run_utils import get_filesystem_type, run_command
LOW_MEMORY_CONDITION = 8 * 1024 * 1024 * 1024
-@click.group(cls=BreezeGroup, name='testing', help='Tools that developers can use to run tests')
+@click.group(cls=BreezeGroup, name="testing", help="Tools that developers can use to run tests")
def testing():
pass
@testing.command(
- name='docker-compose-tests',
+ name="docker-compose-tests",
context_settings=dict(
ignore_unknown_options=True,
allow_extra_args=True,
@@ -90,7 +90,7 @@ def testing():
@option_github_repository
@option_image_tag_for_running
@option_image_name
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def docker_compose_tests(
verbose: bool,
dry_run: bool,
@@ -116,8 +116,8 @@ def docker_compose_tests(
sys.exit(return_code)
-TEST_PROGRESS_REGEXP = r'tests/.*|.*=====.*'
-PERCENT_TEST_PROGRESS_REGEXP = r'^tests/.*\[[ \d%]*\].*'
+TEST_PROGRESS_REGEXP = r"tests/.*|.*=====.*"
+PERCENT_TEST_PROGRESS_REGEXP = r"^tests/.*\[[ \d%]*\].*"
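A quick sanity check of what the two progress regexes match, with invented sample lines:

import re

TEST_PROGRESS_REGEXP = r"tests/.*|.*=====.*"
PERCENT_TEST_PROGRESS_REGEXP = r"^tests/.*\[[ \d%]*\].*"

for line in (
    "tests/core/test_core.py::TestCore::test_ok PASSED [ 42%]",
    "===== 12 passed in 3.21s =====",
    "some unrelated log line",
):
    # First regex flags any progress line; second flags only percent lines.
    print(
        bool(re.match(TEST_PROGRESS_REGEXP, line)),
        bool(re.match(PERCENT_TEST_PROGRESS_REGEXP, line)),
        line,
    )
# True True ... / True False ... / False False ...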
def _run_test(
@@ -130,7 +130,7 @@ def _run_test(
verbose: bool,
) -> tuple[int, str]:
env_variables = get_env_variables_for_docker_commands(exec_shell_params)
- env_variables['RUN_TESTS'] = "true"
+ env_variables["RUN_TESTS"] = "true"
if test_timeout:
env_variables["TEST_TIMEOUT"] = str(test_timeout)
if db_reset:
@@ -148,34 +148,34 @@ def _run_test(
int_list = list(integration)
int_list.append("kerberos")
integration = tuple(int_list)
- env_variables["LIST_OF_INTEGRATION_TESTS_TO_RUN"] = ' '.join(list(integration))
+ env_variables["LIST_OF_INTEGRATION_TESTS_TO_RUN"] = " ".join(list(integration))
project_name = _file_name_from_test_type(exec_shell_params.test_type)
down_cmd = [
*DOCKER_COMPOSE_COMMAND,
"--project-name",
- f'airflow-test-{project_name}',
- 'down',
- '--remove-orphans',
+ f"airflow-test-{project_name}",
+ "down",
+ "--remove-orphans",
]
run_command(down_cmd, verbose=verbose, dry_run=dry_run, env=env_variables, output=output, check=False)
run_cmd = [
*DOCKER_COMPOSE_COMMAND,
"--project-name",
- f'airflow-test-{project_name}',
- 'run',
- '-T',
- '--service-ports',
- '--rm',
- 'airflow',
+ f"airflow-test-{project_name}",
+ "run",
+ "-T",
+ "--service-ports",
+ "--rm",
+ "airflow",
]
run_cmd.extend(list(extra_pytest_args))
try:
result = run_command(
run_cmd, verbose=verbose, dry_run=dry_run, env=env_variables, output=output, check=False
)
- if os.environ.get('CI') == "true" and result.returncode != 0:
+ if os.environ.get("CI") == "true" and result.returncode != 0:
ps_result = run_command(
- ['docker', 'ps', '--all', '--format', '{{.Names}}'],
+ ["docker", "ps", "--all", "--format", "{{.Names}}"],
check=True,
capture_output=True,
text=True,
@@ -195,11 +195,11 @@ def _run_test(
[
*DOCKER_COMPOSE_COMMAND,
"--project-name",
- f'airflow-test-{project_name}',
- 'rm',
- '--stop',
- '--force',
- '-v',
+ f"airflow-test-{project_name}",
+ "rm",
+ "--stop",
+ "--force",
+ "-v",
],
verbose=False,
dry_run=dry_run,
@@ -282,7 +282,7 @@ def run_tests_in_parallel(
import psutil
memory_available = psutil.virtual_memory()
- if memory_available.available < LOW_MEMORY_CONDITION and exec_shell_params.backend in ['mssql', 'mysql']:
+ if memory_available.available < LOW_MEMORY_CONDITION and exec_shell_params.backend in ["mssql", "mysql"]:
# Run heavy tests sequentially
heavy_test_types = ["Core", "Integration", "Providers"]
if bool(set(heavy_test_types) & set(test_types_list)):
@@ -326,7 +326,7 @@ def run_tests_in_parallel(
@testing.command(
- name='tests',
+ name="tests",
help="Run the specified unit test targets.",
context_settings=dict(
ignore_unknown_options=True,
@@ -346,7 +346,7 @@ def run_tests_in_parallel(
@click.option(
"--test-type",
help="Type of test to run. Note that with Providers, you can also specify which provider "
- "tests should be run - for example --test-type \"Providers[airbyte,http]\"",
+ 'tests should be run - for example --test-type "Providers[airbyte,http]"',
default="All",
type=NotVerifiedBetterChoice(ALLOWED_TEST_TYPE_CHOICES),
)
@@ -370,7 +370,7 @@ def run_tests_in_parallel(
show_default=True,
envvar="TEST_TYPES",
)
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def tests(
dry_run: bool,
verbose: bool,
@@ -393,7 +393,7 @@ def tests(
test_types: str,
mount_sources: str,
):
- docker_filesystem = get_filesystem_type('/var/lib/docker')
+ docker_filesystem = get_filesystem_type("/var/lib/docker")
get_console().print(f"Docker filesystem: {docker_filesystem}")
exec_shell_params = ShellParams(
verbose=verbose,
@@ -437,7 +437,7 @@ def tests(
@testing.command(
- name='helm-tests',
+ name="helm-tests",
help="Run Helm chart tests.",
context_settings=dict(
ignore_unknown_options=True,
@@ -448,7 +448,7 @@ def tests(
@option_verbose
@option_image_tag_for_running
@option_mount_sources
-@click.argument('extra_pytest_args', nargs=-1, type=click.UNPROCESSED)
+@click.argument("extra_pytest_args", nargs=-1, type=click.UNPROCESSED)
def helm_tests(
dry_run: bool,
verbose: bool,
@@ -463,10 +463,10 @@ def helm_tests(
mount_sources=mount_sources,
)
env_variables = get_env_variables_for_docker_commands(exec_shell_params)
- env_variables['RUN_TESTS'] = "true"
- env_variables['TEST_TYPE'] = 'Helm'
+ env_variables["RUN_TESTS"] = "true"
+ env_variables["TEST_TYPE"] = "Helm"
perform_environment_checks(verbose=verbose)
- cmd = [*DOCKER_COMPOSE_COMMAND, 'run', '--service-ports', '--rm', 'airflow']
+ cmd = [*DOCKER_COMPOSE_COMMAND, "run", "--service-ports", "--rm", "airflow"]
cmd.extend(list(extra_pytest_args))
result = run_command(cmd, verbose=verbose, dry_run=dry_run, env=env_variables, check=False)
sys.exit(result.returncode)
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index bc42b1d3cf..4d5868bca9 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -37,32 +37,32 @@ ANSWER = ""
APACHE_AIRFLOW_GITHUB_REPOSITORY = "apache/airflow"
# Checked before putting in build cache
-ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ['3.7', '3.8', '3.9', '3.10']
+ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
DEFAULT_PYTHON_MAJOR_MINOR_VERSION = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0]
ALLOWED_ARCHITECTURES = [Architecture.X86_64, Architecture.ARM]
-ALLOWED_BACKENDS = ['sqlite', 'mysql', 'postgres', 'mssql']
-ALLOWED_PROD_BACKENDS = ['mysql', 'postgres', 'mssql']
+ALLOWED_BACKENDS = ["sqlite", "mysql", "postgres", "mssql"]
+ALLOWED_PROD_BACKENDS = ["mysql", "postgres", "mssql"]
DEFAULT_BACKEND = ALLOWED_BACKENDS[0]
ALL_INTEGRATIONS = [
- 'cassandra',
- 'kerberos',
- 'mongo',
- 'openldap',
- 'pinot',
- 'rabbitmq',
- 'redis',
- 'statsd',
- 'trino',
+ "cassandra",
+ "kerberos",
+ "mongo",
+ "openldap",
+ "pinot",
+ "rabbitmq",
+ "redis",
+ "statsd",
+ "trino",
]
ALLOWED_INTEGRATIONS = [
*ALL_INTEGRATIONS,
- 'all',
+ "all",
]
-ALLOWED_KUBERNETES_VERSIONS = ['v1.25.2', 'v1.24.6', 'v1.23.12', 'v1.22.15', 'v1.21.14']
-ALLOWED_EXECUTORS = ['KubernetesExecutor', 'CeleryExecutor', 'LocalExecutor', 'CeleryKubernetesExecutor']
-ALLOWED_KIND_OPERATIONS = ['start', 'stop', 'restart', 'status', 'deploy', 'test', 'shell', 'k9s']
-ALLOWED_CONSTRAINTS_MODES_CI = ['constraints-source-providers', 'constraints', 'constraints-no-providers']
-ALLOWED_CONSTRAINTS_MODES_PROD = ['constraints', 'constraints-no-providers', 'constraints-source-providers']
+ALLOWED_KUBERNETES_VERSIONS = ["v1.25.2", "v1.24.6", "v1.23.12", "v1.22.15", "v1.21.14"]
+ALLOWED_EXECUTORS = ["KubernetesExecutor", "CeleryExecutor", "LocalExecutor", "CeleryKubernetesExecutor"]
+ALLOWED_KIND_OPERATIONS = ["start", "stop", "restart", "status", "deploy", "test", "shell", "k9s"]
+ALLOWED_CONSTRAINTS_MODES_CI = ["constraints-source-providers", "constraints", "constraints-no-providers"]
+ALLOWED_CONSTRAINTS_MODES_PROD = ["constraints", "constraints-no-providers", "constraints-source-providers"]
MOUNT_SELECTED = "selected"
MOUNT_ALL = "all"
@@ -70,9 +70,9 @@ MOUNT_SKIP = "skip"
MOUNT_REMOVE = "remove"
ALLOWED_MOUNT_OPTIONS = [MOUNT_SELECTED, MOUNT_ALL, MOUNT_SKIP, MOUNT_REMOVE]
-ALLOWED_POSTGRES_VERSIONS = ['10', '11', '12', '13', '14']
-ALLOWED_MYSQL_VERSIONS = ['5.7', '8']
-ALLOWED_MSSQL_VERSIONS = ['2017-latest', '2019-latest']
+ALLOWED_POSTGRES_VERSIONS = ["10", "11", "12", "13", "14"]
+ALLOWED_MYSQL_VERSIONS = ["5.7", "8"]
+ALLOWED_MSSQL_VERSIONS = ["2017-latest", "2019-latest"]
PIP_VERSION = "22.2.2"
@@ -83,14 +83,14 @@ def all_selective_test_types() -> tuple[str, ...]:
class SelectiveUnitTestTypes(Enum):
- ALWAYS = 'Always'
- API = 'API'
- CLI = 'CLI'
- CORE = 'Core'
- OTHER = 'Other'
- INTEGRATION = 'Integration'
- PROVIDERS = 'Providers'
- WWW = 'WWW'
+ ALWAYS = "Always"
+ API = "API"
+ CLI = "CLI"
+ CORE = "Core"
+ OTHER = "Other"
+ INTEGRATION = "Integration"
+ PROVIDERS = "Providers"
+ WWW = "WWW"
ALLOWED_TEST_TYPE_CHOICES = [
@@ -102,21 +102,21 @@ ALLOWED_TEST_TYPE_CHOICES = [
"Quarantine",
]
-ALLOWED_PACKAGE_FORMATS = ['wheel', 'sdist', 'both']
-ALLOWED_INSTALLATION_PACKAGE_FORMATS = ['wheel', 'sdist']
-ALLOWED_INSTALLATION_METHODS = ['.', 'apache-airflow']
+ALLOWED_PACKAGE_FORMATS = ["wheel", "sdist", "both"]
+ALLOWED_INSTALLATION_PACKAGE_FORMATS = ["wheel", "sdist"]
+ALLOWED_INSTALLATION_METHODS = [".", "apache-airflow"]
ALLOWED_BUILD_CACHE = ["registry", "local", "disabled"]
MULTI_PLATFORM = "linux/amd64,linux/arm64"
SINGLE_PLATFORMS = ["linux/amd64", "linux/arm64"]
ALLOWED_PLATFORMS = [*SINGLE_PLATFORMS, MULTI_PLATFORM]
-ALLOWED_USE_AIRFLOW_VERSIONS = ['none', 'wheel', 'sdist']
+ALLOWED_USE_AIRFLOW_VERSIONS = ["none", "wheel", "sdist"]
PROVIDER_PACKAGE_JSON_FILE = AIRFLOW_SOURCES_ROOT / "generated" / "provider_dependencies.json"
def get_available_documentation_packages(short_version=False) -> list[str]:
provider_names: list[str] = list(json.loads(PROVIDER_PACKAGE_JSON_FILE.read_text()).keys())
- doc_provider_names = [provider_name.replace('.', '-') for provider_name in provider_names]
+ doc_provider_names = [provider_name.replace(".", "-") for provider_name in provider_names]
available_packages = [f"apache-airflow-providers-{doc_provider}" for doc_provider in doc_provider_names]
available_packages.extend(["apache-airflow", "docker-stack", "helm-chart"])
available_packages.sort()
@@ -153,13 +153,13 @@ SQLITE_URL = "sqlite:////root/airflow/airflow.db"
PYTHONDONTWRITEBYTECODE = True
PRODUCTION_IMAGE = False
-ALL_PYTHON_MAJOR_MINOR_VERSIONS = ['3.7', '3.8', '3.9', '3.10']
+ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
CURRENT_PYTHON_MAJOR_MINOR_VERSIONS = ALL_PYTHON_MAJOR_MINOR_VERSIONS
-CURRENT_POSTGRES_VERSIONS = ['10', '11', '12', '13', '14']
+CURRENT_POSTGRES_VERSIONS = ["10", "11", "12", "13", "14"]
DEFAULT_POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0]
-CURRENT_MYSQL_VERSIONS = ['5.7', '8']
+CURRENT_MYSQL_VERSIONS = ["5.7", "8"]
DEFAULT_MYSQL_VERSION = CURRENT_MYSQL_VERSIONS[0]
-CURRENT_MSSQL_VERSIONS = ['2017-latest', '2019-latest']
+CURRENT_MSSQL_VERSIONS = ["2017-latest", "2019-latest"]
DEFAULT_MSSQL_VERSION = CURRENT_MSSQL_VERSIONS[0]
DB_RESET = False
@@ -176,7 +176,7 @@ SQLITE_URL = "sqlite:////root/airflow/airflow.db"
def get_airflow_version():
- airflow_setup_file = AIRFLOW_SOURCES_ROOT / 'setup.py'
+ airflow_setup_file = AIRFLOW_SOURCES_ROOT / "setup.py"
with open(airflow_setup_file) as setup_file:
for line in setup_file.readlines():
if "version =" in line:
@@ -184,53 +184,53 @@ def get_airflow_version():
def get_airflow_extras():
- airflow_dockerfile = AIRFLOW_SOURCES_ROOT / 'Dockerfile'
+ airflow_dockerfile = AIRFLOW_SOURCES_ROOT / "Dockerfile"
with open(airflow_dockerfile) as dockerfile:
for line in dockerfile.readlines():
if "ARG AIRFLOW_EXTRAS=" in line:
- line = line.split('=')[1].strip()
- return line.replace('"', '')
+ line = line.split("=")[1].strip()
+ return line.replace('"', "")
# Initialize integrations
AVAILABLE_INTEGRATIONS = [
- 'cassandra',
- 'kerberos',
- 'mongo',
- 'openldap',
- 'pinot',
- 'rabbitmq',
- 'redis',
- 'statsd',
- 'trino',
+ "cassandra",
+ "kerberos",
+ "mongo",
+ "openldap",
+ "pinot",
+ "rabbitmq",
+ "redis",
+ "statsd",
+ "trino",
]
ENABLED_INTEGRATIONS = ""
ALL_PROVIDER_YAML_FILES = Path(AIRFLOW_SOURCES_ROOT).glob("airflow/providers/**/provider.yaml")
# Initialize files for rebuild check
FILES_FOR_REBUILD_CHECK = [
- 'setup.py',
- 'setup.cfg',
- 'Dockerfile.ci',
- '.dockerignore',
- 'scripts/docker/common.sh',
- 'scripts/docker/install_additional_dependencies.sh',
- 'scripts/docker/install_airflow.sh',
- 'scripts/docker/install_airflow_dependencies_from_branch_tip.sh',
- 'scripts/docker/install_from_docker_context_files.sh',
- 'scripts/docker/install_mysql.sh',
+ "setup.py",
+ "setup.cfg",
+ "Dockerfile.ci",
+ ".dockerignore",
+ "scripts/docker/common.sh",
+ "scripts/docker/install_additional_dependencies.sh",
+ "scripts/docker/install_airflow.sh",
+ "scripts/docker/install_airflow_dependencies_from_branch_tip.sh",
+ "scripts/docker/install_from_docker_context_files.sh",
+ "scripts/docker/install_mysql.sh",
*ALL_PROVIDER_YAML_FILES,
]
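FILES_FOR_REBUILD_CHECK feeds the decision of whether the CI image must be rebuilt: fingerprint the listed files and rebuild when the fingerprint changes. A sketch of that idea, assuming an md5-based digest (the real check elsewhere in Breeze may differ in detail):

from __future__ import annotations

import hashlib
from pathlib import Path

def fingerprint(paths: list[Path]) -> str:
    # Hash file contents in a stable order so the digest is reproducible.
    digest = hashlib.md5()
    for path in sorted(p for p in paths if p.is_file()):
        digest.update(path.read_bytes())
    return digest.hexdigest()

# Rebuild when fingerprint(...) differs from the previously cached value.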
ENABLED_SYSTEMS = ""
CURRENT_KUBERNETES_VERSIONS = ALLOWED_KUBERNETES_VERSIONS
-CURRENT_EXECUTORS = ['KubernetesExecutor']
+CURRENT_EXECUTORS = ["KubernetesExecutor"]
DEFAULT_KUBERNETES_VERSION = CURRENT_KUBERNETES_VERSIONS[0]
DEFAULT_EXECUTOR = CURRENT_EXECUTORS[0]
-KIND_VERSION = 'v0.16.0'
-HELM_VERSION = 'v3.9.4'
+KIND_VERSION = "v0.16.0"
+HELM_VERSION = "v3.9.4"
# Initialize image build variables - TODO: check whether this should move to the CI dataclass
USE_AIRFLOW_VERSION = None
diff --git a/dev/breeze/src/airflow_breeze/params/build_ci_params.py b/dev/breeze/src/airflow_breeze/params/build_ci_params.py
index 536f2106f4..04d4b147f9 100644
--- a/dev/breeze/src/airflow_breeze/params/build_ci_params.py
+++ b/dev/breeze/src/airflow_breeze/params/build_ci_params.py
@@ -43,7 +43,7 @@ class BuildCiParams(CommonBuildParams):
@property
def image_type(self) -> str:
- return 'CI'
+ return "CI"
@property
def extra_docker_build_flags(self) -> list[str]:
diff --git a/dev/breeze/src/airflow_breeze/params/build_prod_params.py b/dev/breeze/src/airflow_breeze/params/build_prod_params.py
index a5b5409008..8dfbdc0935 100644
--- a/dev/breeze/src/airflow_breeze/params/build_prod_params.py
+++ b/dev/breeze/src/airflow_breeze/params/build_prod_params.py
@@ -65,7 +65,7 @@ class BuildProdParams(CommonBuildParams):
@property
def image_type(self) -> str:
- return 'PROD'
+ return "PROD"
@property
def args_for_remote_install(self) -> list:
@@ -78,7 +78,7 @@ class BuildProdParams(CommonBuildParams):
"AIRFLOW_SOURCES_TO=/empty",
]
)
- if re.match('v?2.*', self.airflow_version):
+ if re.match("v?2.*", self.airflow_version):
build_args.extend(
["--build-arg", f"AIRFLOW_CONSTRAINTS_REFERENCE=constraints-{self.airflow_version}"]
)
@@ -91,19 +91,19 @@ class BuildProdParams(CommonBuildParams):
build_args.extend(
["--build-arg", f"AIRFLOW_CONSTRAINTS_LOCATION={self.airflow_constraints_location}"]
)
- if self.airflow_version == 'v2-0-test':
+ if self.airflow_version == "v2-0-test":
self.airflow_branch_for_pypi_preloading = "v2-0-test"
- elif self.airflow_version == 'v2-1-test':
+ elif self.airflow_version == "v2-1-test":
self.airflow_branch_for_pypi_preloading = "v2-1-test"
- elif self.airflow_version == 'v2-2-test':
+ elif self.airflow_version == "v2-2-test":
self.airflow_branch_for_pypi_preloading = "v2-2-test"
- elif re.match(r'^2\.0.*$', self.airflow_version):
+ elif re.match(r"^2\.0.*$", self.airflow_version):
self.airflow_branch_for_pypi_preloading = "v2-0-stable"
- elif re.match(r'^2\.1.*$', self.airflow_version):
+ elif re.match(r"^2\.1.*$", self.airflow_version):
self.airflow_branch_for_pypi_preloading = "v2-1-stable"
- elif re.match(r'^2\.2.*$', self.airflow_version):
+ elif re.match(r"^2\.2.*$", self.airflow_version):
self.airflow_branch_for_pypi_preloading = "v2-2-stable"
- elif re.match(r'^2\.3.*$', self.airflow_version):
+ elif re.match(r"^2\.3.*$", self.airflow_version):
self.airflow_branch_for_pypi_preloading = "v2-3-stable"
else:
self.airflow_branch_for_pypi_preloading = AIRFLOW_BRANCH
@@ -126,11 +126,11 @@ class BuildProdParams(CommonBuildParams):
)
extra_build_flags.extend(self.args_for_remote_install)
elif len(self.install_airflow_version) > 0:
- if not re.match(r'^[0-9\.]+((a|b|rc|alpha|beta|pre)[0-9]+)?$', self.install_airflow_version):
+ if not re.match(r"^[0-9\.]+((a|b|rc|alpha|beta|pre)[0-9]+)?$", self.install_airflow_version):
get_console().print(
- f'\n[error]ERROR: Bad value for install-airflow-version:{self.install_airflow_version}'
+ f"\n[error]ERROR: Bad value for install-airflow-version: {self.install_airflow_version}"
)
- get_console().print('[error]Only numerical versions allowed for PROD image here !')
+ get_console().print("[error]Only numerical versions are allowed for the PROD image here!")
sys.exit()
extra_build_flags.extend(["--build-arg", "AIRFLOW_INSTALLATION_METHOD=apache-airflow"])
extra_build_flags.extend(
@@ -179,19 +179,19 @@ class BuildProdParams(CommonBuildParams):
@property
def airflow_pre_cached_pip_packages(self) -> str:
- return 'false' if self.disable_airflow_repo_cache else 'true'
+ return "false" if self.disable_airflow_repo_cache else "true"
@property
def install_mssql_client(self) -> str:
- return 'false' if self.disable_mssql_client_installation else 'true'
+ return "false" if self.disable_mssql_client_installation else "true"
@property
def install_mysql_client(self) -> str:
- return 'false' if self.disable_mysql_client_installation else 'true'
+ return "false" if self.disable_mysql_client_installation else "true"
@property
def install_postgres_client(self) -> str:
- return 'false' if self.disable_postgres_client_installation else 'true'
+ return "false" if self.disable_postgres_client_installation else "true"
@property
def docker_context_files(self) -> str:
diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py
index 9ee57329bb..9de76df3eb 100644
--- a/dev/breeze/src/airflow_breeze/params/common_build_params.py
+++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py
@@ -39,9 +39,9 @@ class CommonBuildParams:
additional_dev_apt_env: str = ""
additional_python_deps: str = ""
additional_pip_install_flags: str = ""
- airflow_branch: str = os.environ.get('DEFAULT_BRANCH', AIRFLOW_BRANCH)
+ airflow_branch: str = os.environ.get("DEFAULT_BRANCH", AIRFLOW_BRANCH)
default_constraints_branch: str = os.environ.get(
- 'DEFAULT_CONSTRAINTS_BRANCH', DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
+ "DEFAULT_CONSTRAINTS_BRANCH", DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
)
airflow_constraints_location: str = ""
answer: str | None = None
@@ -52,9 +52,9 @@ class CommonBuildParams:
dev_apt_deps: str = ""
docker_cache: str = "registry"
empty_image: bool = False
- github_actions: str = os.environ.get('GITHUB_ACTIONS', "false")
+ github_actions: str = os.environ.get("GITHUB_ACTIONS", "false")
github_repository: str = APACHE_AIRFLOW_GITHUB_REPOSITORY
- github_token: str = os.environ.get('GITHUB_TOKEN', "")
+ github_token: str = os.environ.get("GITHUB_TOKEN", "")
github_username: str = ""
image_tag: str | None = None
install_providers_from_sources: bool = False
@@ -81,15 +81,15 @@ class CommonBuildParams:
@property
def airflow_base_image_name(self):
- image = f'ghcr.io/{self.github_repository.lower()}'
+ image = f"ghcr.io/{self.github_repository.lower()}"
return image
@property
def airflow_image_name(self):
"""Construct image link"""
image = (
- f'{self.airflow_base_image_name}/{self.airflow_branch}/'
- f'{self.image_type.lower()}/python{self.python}'
+ f"{self.airflow_base_image_name}/{self.airflow_branch}/"
+ f"{self.image_type.lower()}/python{self.python}"
)
return image
@@ -114,11 +114,11 @@ class CommonBuildParams:
"""Construct Python Base Image"""
if self.python_image is not None:
return self.python_image
- return f'python:{self.python}-slim-bullseye'
+ return f"python:{self.python}-slim-bullseye"
@property
def airflow_image_repository(self):
- return f'https://github.com/{self.github_repository}'
+ return f"https://github.com/{self.github_repository}"
@property
def airflow_image_date_created(self):
@@ -133,8 +133,8 @@ class CommonBuildParams:
def airflow_image_name_with_tag(self):
"""Construct image link"""
image = (
- f'{self.airflow_base_image_name}/{self.airflow_branch}/'
- f'{self.image_type.lower()}/python{self.python}'
+ f"{self.airflow_base_image_name}/{self.airflow_branch}/"
+ f"{self.image_type.lower()}/python{self.python}"
)
return image if self.image_tag is None else image + f":{self.image_tag}"
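A worked example of the name construction above (all values illustrative):

github_repository = "apache/airflow"
airflow_branch = "main"
image_type = "CI"
python = "3.7"
image_tag = "latest"

base = f"ghcr.io/{github_repository.lower()}"
image = f"{base}/{airflow_branch}/{image_type.lower()}/python{python}"
print(image if image_tag is None else image + f":{image_tag}")
# ghcr.io/apache/airflow/main/ci/python3.7:latest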
diff --git a/dev/breeze/src/airflow_breeze/params/doc_build_params.py b/dev/breeze/src/airflow_breeze/params/doc_build_params.py
index 5e64f9a253..c331e9b084 100644
--- a/dev/breeze/src/airflow_breeze/params/doc_build_params.py
+++ b/dev/breeze/src/airflow_breeze/params/doc_build_params.py
@@ -29,7 +29,7 @@ class DocBuildParams:
spellcheck_only: bool
for_production: bool
skip_environment_initialization: bool = False
- github_actions = os.environ.get('GITHUB_ACTIONS', "false")
+ github_actions = os.environ.get("GITHUB_ACTIONS", "false")
@property
def args_doc_builder(self) -> list[str]:
diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py
index 34afb4773f..02489ff102 100644
--- a/dev/breeze/src/airflow_breeze/params/shell_params.py
+++ b/dev/breeze/src/airflow_breeze/params/shell_params.py
@@ -52,8 +52,8 @@ DOCKER_COMPOSE_DIR = SCRIPTS_CI_DIR / "docker-compose"
def add_mssql_compose_file(compose_file_list: list[Path]):
- docker_filesystem = get_filesystem_type('/var/lib/docker')
- if docker_filesystem == 'tmpfs':
+ docker_filesystem = get_filesystem_type("/var/lib/docker")
+ if docker_filesystem == "tmpfs":
compose_file_list.append(DOCKER_COMPOSE_DIR / "backend-mssql-tmpfs-volume.yml")
else:
compose_file_list.append(DOCKER_COMPOSE_DIR / "backend-mssql-docker-volume.yml")
@@ -65,9 +65,9 @@ class ShellParams:
Shell parameters. Those parameters are used to determine the command issued to run the shell command.
"""
- airflow_branch: str = os.environ.get('DEFAULT_BRANCH', AIRFLOW_BRANCH)
+ airflow_branch: str = os.environ.get("DEFAULT_BRANCH", AIRFLOW_BRANCH)
default_constraints_branch: str = os.environ.get(
- 'DEFAULT_CONSTRAINTS_BRANCH', DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
+ "DEFAULT_CONSTRAINTS_BRANCH", DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
)
airflow_constraints_reference: str = DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH
airflow_extras: str = ""
@@ -82,9 +82,9 @@ class ShellParams:
forward_ports: bool = True
forward_credentials: str = "false"
airflow_constraints_mode: str = ALLOWED_CONSTRAINTS_MODES_CI[0]
- github_actions: str = os.environ.get('GITHUB_ACTIONS', "false")
+ github_actions: str = os.environ.get("GITHUB_ACTIONS", "false")
github_repository: str = APACHE_AIRFLOW_GITHUB_REPOSITORY
- github_token: str = os.environ.get('GITHUB_TOKEN', "")
+ github_token: str = os.environ.get("GITHUB_TOKEN", "")
image_tag: str | None = None
include_mypy_volume: bool = False
install_airflow_version: str = ""
@@ -122,20 +122,20 @@ class ShellParams:
@property
def airflow_version_for_production_image(self):
- cmd = ['docker', 'run', '--entrypoint', '/bin/bash', f'{self.airflow_image_name}']
- cmd.extend(['-c', 'echo "${AIRFLOW_VERSION}"'])
+ cmd = ["docker", "run", "--entrypoint", "/bin/bash", f"{self.airflow_image_name}"]
+ cmd.extend(["-c", 'echo "${AIRFLOW_VERSION}"'])
output = run_command(cmd, capture_output=True, text=True)
return output.stdout.strip() if output.stdout else "UNKNOWN_VERSION"
@property
def airflow_base_image_name(self) -> str:
- image = f'ghcr.io/{self.github_repository.lower()}'
+ image = f"ghcr.io/{self.github_repository.lower()}"
return image
@property
def airflow_image_name(self) -> str:
"""Construct CI image link"""
- image = f'{self.airflow_base_image_name}/{self.airflow_branch}/ci/python{self.python}'
+ image = f"{self.airflow_base_image_name}/{self.airflow_branch}/ci/python{self.python}"
return image
@property
@@ -145,7 +145,7 @@ class ShellParams:
@property
def airflow_image_kubernetes(self) -> str:
- image = f'{self.airflow_base_image_name}/{self.airflow_branch}/kubernetes/python{self.python}'
+ image = f"{self.airflow_base_image_name}/{self.airflow_branch}/kubernetes/python{self.python}"
return image
@property
@@ -164,7 +164,7 @@ class ShellParams:
@property
def image_type(self) -> str:
- return 'CI'
+ return "CI"
@property
def md5sum_cache_dir(self) -> Path:
@@ -173,12 +173,12 @@ class ShellParams:
@property
def backend_version(self) -> str:
- version = ''
- if self.backend == 'postgres':
+ version = ""
+ if self.backend == "postgres":
version = self.postgres_version
- if self.backend == 'mysql':
+ if self.backend == "mysql":
version = self.mysql_version
- if self.backend == 'mssql':
+ if self.backend == "mssql":
version = self.mssql_version
return version
@@ -189,17 +189,17 @@ class ShellParams:
def print_badge_info(self):
if self.verbose:
- get_console().print(f'[info]Use {self.image_type} image[/]')
- get_console().print(f'[info]Branch Name: {self.airflow_branch}[/]')
- get_console().print(f'[info]Docker Image: {self.airflow_image_name_with_tag}[/]')
- get_console().print(f'[info]Airflow source version:{self.airflow_version}[/]')
- get_console().print(f'[info]Python Version: {self.python}[/]')
- get_console().print(f'[info]Backend: {self.backend} {self.backend_version}[/]')
- get_console().print(f'[info]Airflow used at runtime: {self.use_airflow_version}[/]')
+ get_console().print(f"[info]Use {self.image_type} image[/]")
+ get_console().print(f"[info]Branch Name: {self.airflow_branch}[/]")
+ get_console().print(f"[info]Docker Image: {self.airflow_image_name_with_tag}[/]")
+ get_console().print(f"[info]Airflow source version: {self.airflow_version}[/]")
+ get_console().print(f"[info]Python Version: {self.python}[/]")
+ get_console().print(f"[info]Backend: {self.backend} {self.backend_version}[/]")
+ get_console().print(f"[info]Airflow used at runtime: {self.use_airflow_version}[/]")
def get_backend_compose_files(self, backend: str) -> list[Path]:
backend_docker_compose_file = DOCKER_COMPOSE_DIR / f"backend-{backend}.yml"
- if backend == 'sqlite' or not self.forward_ports:
+ if backend == "sqlite" or not self.forward_ports:
return [backend_docker_compose_file]
return [backend_docker_compose_file, DOCKER_COMPOSE_DIR / f"backend-{backend}-port.yml"]
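Condensed, the selection logic reads: every backend gets its base compose file, and any backend except sqlite also gets a port-forwarding overlay when forwarding is enabled. An equivalent sketch (the compose directory path is an assumption based on the module header):

from __future__ import annotations

from pathlib import Path

DOCKER_COMPOSE_DIR = Path("scripts/ci/docker-compose")  # assumed location

def backend_compose_files(backend: str, forward_ports: bool) -> list[Path]:
    files = [DOCKER_COMPOSE_DIR / f"backend-{backend}.yml"]
    if backend != "sqlite" and forward_ports:
        files.append(DOCKER_COMPOSE_DIR / f"backend-{backend}-port.yml")
    return files

print(backend_compose_files("postgres", forward_ports=True))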
@@ -209,7 +209,7 @@ class ShellParams:
backend_files: list[Path] = []
if self.backend != "all":
backend_files = self.get_backend_compose_files(self.backend)
- if self.backend == 'mssql':
+ if self.backend == "mssql":
add_mssql_compose_file(compose_file_list)
else:
for backend in ALLOWED_BACKENDS:
diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
index 97440dddda..9e4b5b5bf1 100644
--- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py
+++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
@@ -22,95 +22,95 @@
# `pre_commit_ids_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze` DIRECTORY
PRE_COMMIT_LIST = [
- 'all',
- 'black',
- 'blacken-docs',
- 'check-airflow-2-2-compatibility',
- 'check-airflow-config-yaml-consistent',
- 'check-apache-license-rat',
- 'check-base-operator-partial-arguments',
- 'check-base-operator-usage',
- 'check-boring-cyborg-configuration',
- 'check-breeze-top-dependencies-limited',
- 'check-builtin-literals',
- 'check-changelog-has-no-duplicates',
- 'check-core-deprecation-classes',
- 'check-daysago-import-from-utils',
- 'check-decorated-operator-implements-custom-name',
- 'check-docstring-param-types',
- 'check-example-dags-urls',
- 'check-executables-have-shebangs',
- 'check-extra-packages-references',
- 'check-extras-order',
- 'check-for-inclusive-language',
- 'check-hooks-apply',
- 'check-incorrect-use-of-LoggingMixin',
- 'check-init-decorator-arguments',
- 'check-lazy-logging',
- 'check-merge-conflict',
- 'check-newsfragments-are-valid',
- 'check-no-providers-in-core-examples',
- 'check-no-relative-imports',
- 'check-persist-credentials-disabled-in-github-workflows',
- 'check-pre-commit-information-consistent',
- 'check-provide-create-sessions-imports',
- 'check-provider-yaml-valid',
- 'check-providers-init-file-missing',
- 'check-providers-subpackages-init-file-exist',
- 'check-pydevd-left-in-code',
- 'check-revision-heads-map',
- 'check-safe-filter-usage-in-html',
- 'check-setup-order',
- 'check-start-date-not-used-in-defaults',
- 'check-system-tests-present',
- 'check-system-tests-tocs',
- 'check-xml',
- 'codespell',
- 'compile-www-assets',
- 'compile-www-assets-dev',
- 'create-missing-init-py-files-tests',
- 'debug-statements',
- 'detect-private-key',
- 'doctoc',
- 'end-of-file-fixer',
- 'fix-encoding-pragma',
- 'flynt',
- 'identity',
- 'insert-license',
- 'isort',
- 'lint-chart-schema',
- 'lint-css',
- 'lint-dockerfile',
- 'lint-helm-chart',
- 'lint-json-schema',
- 'lint-markdown',
- 'lint-openapi',
- 'mixed-line-ending',
- 'pretty-format-json',
- 'pydocstyle',
- 'python-no-log-warn',
- 'pyupgrade',
- 'replace-bad-characters',
- 'rst-backticks',
- 'run-flake8',
- 'run-mypy',
- 'run-shellcheck',
- 'static-check-autoflake',
- 'trailing-whitespace',
- 'ts-compile-and-lint-javascript',
- 'update-breeze-cmd-output',
- 'update-breeze-readme-config-hash',
- 'update-er-diagram',
- 'update-extras',
- 'update-in-the-wild-to-be-sorted',
- 'update-inlined-dockerfile-scripts',
- 'update-local-yml-file',
- 'update-migration-references',
- 'update-providers-dependencies',
- 'update-spelling-wordlist-to-be-sorted',
- 'update-supported-versions',
- 'update-vendored-in-k8s-json-schema',
- 'update-version',
- 'yamllint',
- 'yesqa',
+ "all",
+ "black",
+ "blacken-docs",
+ "check-airflow-2-2-compatibility",
+ "check-airflow-config-yaml-consistent",
+ "check-apache-license-rat",
+ "check-base-operator-partial-arguments",
+ "check-base-operator-usage",
+ "check-boring-cyborg-configuration",
+ "check-breeze-top-dependencies-limited",
+ "check-builtin-literals",
+ "check-changelog-has-no-duplicates",
+ "check-core-deprecation-classes",
+ "check-daysago-import-from-utils",
+ "check-decorated-operator-implements-custom-name",
+ "check-docstring-param-types",
+ "check-example-dags-urls",
+ "check-executables-have-shebangs",
+ "check-extra-packages-references",
+ "check-extras-order",
+ "check-for-inclusive-language",
+ "check-hooks-apply",
+ "check-incorrect-use-of-LoggingMixin",
+ "check-init-decorator-arguments",
+ "check-lazy-logging",
+ "check-merge-conflict",
+ "check-newsfragments-are-valid",
+ "check-no-providers-in-core-examples",
+ "check-no-relative-imports",
+ "check-persist-credentials-disabled-in-github-workflows",
+ "check-pre-commit-information-consistent",
+ "check-provide-create-sessions-imports",
+ "check-provider-yaml-valid",
+ "check-providers-init-file-missing",
+ "check-providers-subpackages-init-file-exist",
+ "check-pydevd-left-in-code",
+ "check-revision-heads-map",
+ "check-safe-filter-usage-in-html",
+ "check-setup-order",
+ "check-start-date-not-used-in-defaults",
+ "check-system-tests-present",
+ "check-system-tests-tocs",
+ "check-xml",
+ "codespell",
+ "compile-www-assets",
+ "compile-www-assets-dev",
+ "create-missing-init-py-files-tests",
+ "debug-statements",
+ "detect-private-key",
+ "doctoc",
+ "end-of-file-fixer",
+ "fix-encoding-pragma",
+ "flynt",
+ "identity",
+ "insert-license",
+ "isort",
+ "lint-chart-schema",
+ "lint-css",
+ "lint-dockerfile",
+ "lint-helm-chart",
+ "lint-json-schema",
+ "lint-markdown",
+ "lint-openapi",
+ "mixed-line-ending",
+ "pretty-format-json",
+ "pydocstyle",
+ "python-no-log-warn",
+ "pyupgrade",
+ "replace-bad-characters",
+ "rst-backticks",
+ "run-flake8",
+ "run-mypy",
+ "run-shellcheck",
+ "static-check-autoflake",
+ "trailing-whitespace",
+ "ts-compile-and-lint-javascript",
+ "update-breeze-cmd-output",
+ "update-breeze-readme-config-hash",
+ "update-er-diagram",
+ "update-extras",
+ "update-in-the-wild-to-be-sorted",
+ "update-inlined-dockerfile-scripts",
+ "update-local-yml-file",
+ "update-migration-references",
+ "update-providers-dependencies",
+ "update-spelling-wordlist-to-be-sorted",
+ "update-supported-versions",
+ "update-vendored-in-k8s-json-schema",
+ "update-version",
+ "yamllint",
+ "yesqa",
]
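Every entry above is a pre-commit hook id, and the list is generated from the jinja template named in the header comment. Any of these ids can also be run directly through pre-commit itself (assuming pre-commit is installed and you run from a checkout whose .pre-commit-config.yaml defines the hook):

    import subprocess

    # run a single static check by id; "black" is one of the ids listed above
    subprocess.run(["pre-commit", "run", "black", "--all-files"], check=False)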
diff --git a/dev/breeze/src/airflow_breeze/utils/cache.py b/dev/breeze/src/airflow_breeze/utils/cache.py
index c23d3012b6..a52b17ac97 100644
--- a/dev/breeze/src/airflow_breeze/utils/cache.py
+++ b/dev/breeze/src/airflow_breeze/utils/cache.py
@@ -65,9 +65,9 @@ def write_to_cache_file(param_name: str, param_value: str, check_allowed_values:
cache_path.parent.mkdir(parents=True, exist_ok=True)
cache_path.write_text(param_value)
else:
- get_console().print(f'[cyan]You have sent the {param_value} for {param_name}')
- get_console().print(f'[cyan]Allowed values for the {param_name} are {allowed_values}')
- get_console().print('[cyan]Provide one of the supported params. Write to cache dir failed')
+ get_console().print(f"[cyan]You have sent the {param_value} for {param_name}")
+ get_console().print(f"[cyan]Allowed value for the {param_name} are {allowed_values}")
+ get_console().print("[cyan]Provide one of the supported params. Write to cache dir failed")
sys.exit(1)
@@ -102,7 +102,7 @@ def read_and_validate_value_from_cache(param_name: str, default_param_value: str
def check_if_values_allowed(param_name: str, param_value: str) -> tuple[bool, list[Any]]:
"""Checks if parameter value is allowed by looking at global constants."""
allowed = False
- allowed_values = getattr(global_constants, f'ALLOWED_{param_name.upper()}S')
+ allowed_values = getattr(global_constants, f"ALLOWED_{param_name.upper()}S")
if param_value in allowed_values:
allowed = True
return allowed, allowed_values
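The lookup in check_if_values_allowed is purely name-based: the parameter name "backend" resolves to the constant ALLOWED_BACKENDS, and so on. A self-contained sketch of the same pattern, with a SimpleNamespace standing in for the real airflow_breeze.global_constants module:

    from __future__ import annotations

    from types import SimpleNamespace
    from typing import Any

    # stand-in for the real global_constants module
    global_constants = SimpleNamespace(ALLOWED_BACKENDS=["sqlite", "mysql", "postgres", "mssql"])

    def check_if_values_allowed(param_name: str, param_value: str) -> tuple[bool, list[Any]]:
        allowed_values = getattr(global_constants, f"ALLOWED_{param_name.upper()}S")
        return param_value in allowed_values, allowed_values

    print(check_if_values_allowed("backend", "mysql"))  # (True, ['sqlite', ...])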
diff --git a/dev/breeze/src/airflow_breeze/utils/ci_group.py b/dev/breeze/src/airflow_breeze/utils/ci_group.py
index b28e525f40..13a1110cb0 100644
--- a/dev/breeze/src/airflow_breeze/utils/ci_group.py
+++ b/dev/breeze/src/airflow_breeze/utils/ci_group.py
@@ -40,7 +40,7 @@ def ci_group(title: str, message_type: MessageType | None = MessageType.INFO, ou
if _in_ci_group or skip_group_output():
yield
return
- if os.environ.get('GITHUB_ACTIONS', 'false') != "true":
+ if os.environ.get("GITHUB_ACTIONS", "false") != "true":
if message_type is not None:
get_console(output=output).print(f"\n[{message_type.value}]{title}\n")
else:
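Outside of CI the group title above is simply printed; on GitHub Actions the runner's ::group::/::endgroup:: workflow commands collapse the enclosed output instead. A minimal stand-alone sketch of that mechanism (not the breeze implementation, which also routes through rich and supports message types):

    import os
    from contextlib import contextmanager

    @contextmanager
    def ci_group(title: str):
        in_actions = os.environ.get("GITHUB_ACTIONS", "false") == "true"
        # ::group:: / ::endgroup:: are GitHub Actions workflow commands
        print(f"::group::{title}" if in_actions else f"\n{title}\n")
        try:
            yield
        finally:
            if in_actions:
                print("::endgroup::")

    with ci_group("Building CI image"):
        print("...build output...")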
diff --git a/dev/breeze/src/airflow_breeze/utils/common_options.py b/dev/breeze/src/airflow_breeze/utils/common_options.py
index a42ac68ef2..34abaf674c 100644
--- a/dev/breeze/src/airflow_breeze/utils/common_options.py
+++ b/dev/breeze/src/airflow_breeze/utils/common_options.py
@@ -73,7 +73,7 @@ option_verbose = click.option(
"--verbose",
is_flag=True,
help="Print verbose information about performed steps.",
- envvar='VERBOSE',
+ envvar="VERBOSE",
callback=_set_default_from_parent,
)
option_dry_run = click.option(
@@ -81,359 +81,359 @@ option_dry_run = click.option(
"--dry-run",
is_flag=True,
help="If dry-run is set, commands are only printed, not executed.",
- envvar='DRY_RUN',
+ envvar="DRY_RUN",
callback=_set_default_from_parent,
)
option_answer = click.option(
"-a",
"--answer",
- type=AnswerChoice(['y', 'n', 'q', 'yes', 'no', 'quit']),
+ type=AnswerChoice(["y", "n", "q", "yes", "no", "quit"]),
help="Force answer to questions.",
- envvar='ANSWER',
+ envvar="ANSWER",
callback=_set_default_from_parent,
)
option_github_repository = click.option(
- '-g',
- '--github-repository',
- help='GitHub repository used to pull, push run images.',
+ "-g",
+ "--github-repository",
+ help="GitHub repository used to pull, push run images.",
default=APACHE_AIRFLOW_GITHUB_REPOSITORY,
show_default=True,
- envvar='GITHUB_REPOSITORY',
+ envvar="GITHUB_REPOSITORY",
callback=_set_default_from_parent,
)
option_python = click.option(
- '-p',
- '--python',
+ "-p",
+ "--python",
type=CacheableChoice(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS),
default=CacheableDefault(value=ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0]),
show_default=True,
- help='Python major/minor version used in Airflow image for images.',
- envvar='PYTHON_MAJOR_MINOR_VERSION',
+ help="Python major/minor version used in Airflow image for images.",
+ envvar="PYTHON_MAJOR_MINOR_VERSION",
)
option_backend = click.option(
- '-b',
- '--backend',
+ "-b",
+ "--backend",
type=CacheableChoice(ALLOWED_BACKENDS),
default=CacheableDefault(value=ALLOWED_BACKENDS[0]),
show_default=True,
help="Database backend to use.",
- envvar='BACKEND',
+ envvar="BACKEND",
)
option_integration = click.option(
- '--integration',
+ "--integration",
help="Integration(s) to enable when running (can be more than one).",
type=BetterChoice(ALLOWED_INTEGRATIONS),
multiple=True,
)
option_postgres_version = click.option(
- '-P',
- '--postgres-version',
+ "-P",
+ "--postgres-version",
type=CacheableChoice(ALLOWED_POSTGRES_VERSIONS),
default=CacheableDefault(ALLOWED_POSTGRES_VERSIONS[0]),
show_default=True,
help="Version of Postgres used.",
)
option_mysql_version = click.option(
- '-M',
- '--mysql-version',
+ "-M",
+ "--mysql-version",
help="Version of MySQL used.",
type=CacheableChoice(ALLOWED_MYSQL_VERSIONS),
default=CacheableDefault(ALLOWED_MYSQL_VERSIONS[0]),
show_default=True,
)
option_mssql_version = click.option(
- '-S',
- '--mssql-version',
+ "-S",
+ "--mssql-version",
help="Version of MsSQL used.",
type=CacheableChoice(ALLOWED_MSSQL_VERSIONS),
default=CacheableDefault(ALLOWED_MSSQL_VERSIONS[0]),
show_default=True,
)
option_forward_credentials = click.option(
- '-f', '--forward-credentials', help="Forward local credentials to container when running.", is_flag=True
+ "-f", "--forward-credentials", help="Forward local credentials to container when running.", is_flag=True
)
option_use_airflow_version = click.option(
- '--use-airflow-version',
+ "--use-airflow-version",
help="Use (reinstall at entry) Airflow version from PyPI. It can also be `none`, `wheel`, or `sdist`"
" if Airflow should be removed, installed from wheel packages or sdist packages available in dist "
"folder respectively. Implies --mount-sources `remove`.",
type=UseAirflowVersionType(ALLOWED_USE_AIRFLOW_VERSIONS),
- envvar='USE_AIRFLOW_VERSION',
+ envvar="USE_AIRFLOW_VERSION",
)
option_airflow_extras = click.option(
- '--airflow-extras',
+ "--airflow-extras",
help="Airflow extras to install when --use-airflow-version is used",
default="",
show_default=True,
- envvar='AIRFLOW_EXTRAS',
+ envvar="AIRFLOW_EXTRAS",
)
option_mount_sources = click.option(
- '--mount-sources',
+ "--mount-sources",
type=BetterChoice(ALLOWED_MOUNT_OPTIONS),
default=ALLOWED_MOUNT_OPTIONS[0],
show_default=True,
help="Choose scope of local sources that should be mounted, skipped, or removed (default = selected).",
)
option_force_build = click.option(
- '--force-build', help="Force image build no matter if it is determined as needed.", is_flag=True
+ "--force-build", help="Force image build no matter if it is determined as needed.", is_flag=True
)
option_db_reset = click.option(
- '-d',
- '--db-reset',
+ "-d",
+ "--db-reset",
help="Reset DB when entering the container.",
is_flag=True,
- envvar='DB_RESET',
+ envvar="DB_RESET",
)
option_use_packages_from_dist = click.option(
- '--use-packages-from-dist',
+ "--use-packages-from-dist",
is_flag=True,
help="Install all found packages (--package-format determines type) from 'dist' folder "
"when entering breeze.",
- envvar='USE_PACKAGES_FROM_DIST',
+ envvar="USE_PACKAGES_FROM_DIST",
)
option_docker_cache = click.option(
- '-c',
- '--docker-cache',
- help='Cache option for image used during the build.',
+ "-c",
+ "--docker-cache",
+ help="Cache option for image used during the build.",
default=ALLOWED_BUILD_CACHE[0],
show_default=True,
type=BetterChoice(ALLOWED_BUILD_CACHE),
)
option_github_token = click.option(
- '--github-token',
- help='The token used to authenticate to GitHub.',
- envvar='GITHUB_TOKEN',
+ "--github-token",
+ help="The token used to authenticate to GitHub.",
+ envvar="GITHUB_TOKEN",
)
option_github_username = click.option(
- '--github-username',
- help='The user name used to authenticate to GitHub.',
- envvar='GITHUB_USERNAME',
+ "--github-username",
+ help="The user name used to authenticate to GitHub.",
+ envvar="GITHUB_USERNAME",
)
option_image_tag_for_pulling = click.option(
- '-t',
- '--image-tag',
- help='Tag of the image which is used to pull the image.',
+ "-t",
+ "--image-tag",
+ help="Tag of the image which is used to pull the image.",
show_default=True,
default="latest",
- envvar='IMAGE_TAG',
+ envvar="IMAGE_TAG",
)
option_image_tag_for_building = click.option(
- '-t',
- '--image-tag',
- help='Tag the image after building it.',
+ "-t",
+ "--image-tag",
+ help="Tag the image after building it.",
show_default=True,
default="latest",
- envvar='IMAGE_TAG',
+ envvar="IMAGE_TAG",
)
option_image_tag_for_running = click.option(
- '-t',
- '--image-tag',
- help='Tag of the image which is used to run the image (implies --mount-sources=skip).',
+ "-t",
+ "--image-tag",
+ help="Tag of the image which is used to run the image (implies --mount-sources=skip).",
show_default=True,
default="latest",
- envvar='IMAGE_TAG',
+ envvar="IMAGE_TAG",
)
option_image_tag_for_verifying = click.option(
- '-t',
- '--image-tag',
- help='Tag of the image when verifying it.',
+ "-t",
+ "--image-tag",
+ help="Tag of the image when verifying it.",
show_default=True,
default="latest",
- envvar='IMAGE_TAG',
+ envvar="IMAGE_TAG",
)
option_image_name = click.option(
- '-n', '--image-name', help='Name of the image to verify (overrides --python and --image-tag).'
+ "-n", "--image-name", help="Name of the image to verify (overrides --python and --image-tag)."
)
option_platform_multiple = click.option(
- '--platform',
- help='Platform for Airflow image.',
- envvar='PLATFORM',
+ "--platform",
+ help="Platform for Airflow image.",
+ envvar="PLATFORM",
type=BetterChoice(ALLOWED_PLATFORMS),
)
option_platform_single = click.option(
- '--platform',
- help='Platform for Airflow image.',
- envvar='PLATFORM',
+ "--platform",
+ help="Platform for Airflow image.",
+ envvar="PLATFORM",
type=BetterChoice(SINGLE_PLATFORMS),
)
option_upgrade_to_newer_dependencies = click.option(
"-u",
- '--upgrade-to-newer-dependencies',
+ "--upgrade-to-newer-dependencies",
is_flag=True,
- help='When set, upgrade all PIP packages to latest.',
- envvar='UPGRADE_TO_NEWER_DEPENDENCIES',
+ help="When set, upgrade all PIP packages to latest.",
+ envvar="UPGRADE_TO_NEWER_DEPENDENCIES",
)
option_upgrade_on_failure = click.option(
"-u",
- '--upgrade-on-failure',
+ "--upgrade-on-failure",
is_flag=True,
- help='When set, attempt to run upgrade to newer dependencies when regular build fails.',
- envvar='UPGRADE_ON_FAILURE',
+ help="When set, attempt to run upgrade to newer dependencies when regular build fails.",
+ envvar="UPGRADE_ON_FAILURE",
)
option_additional_extras = click.option(
- '--additional-extras',
- help='Additional extra package while installing Airflow in the image.',
- envvar='ADDITIONAL_AIRFLOW_EXTRAS',
+ "--additional-extras",
+ help="Additional extra package while installing Airflow in the image.",
+ envvar="ADDITIONAL_AIRFLOW_EXTRAS",
)
option_additional_dev_apt_deps = click.option(
- '--additional-dev-apt-deps',
- help='Additional apt dev dependencies to use when building the images.',
- envvar='ADDITIONAL_DEV_APT_DEPS',
+ "--additional-dev-apt-deps",
+ help="Additional apt dev dependencies to use when building the images.",
+ envvar="ADDITIONAL_DEV_APT_DEPS",
)
option_additional_runtime_apt_deps = click.option(
- '--additional-runtime-apt-deps',
- help='Additional apt runtime dependencies to use when building the images.',
- envvar='ADDITIONAL_RUNTIME_APT_DEPS',
+ "--additional-runtime-apt-deps",
+ help="Additional apt runtime dependencies to use when building the images.",
+ envvar="ADDITIONAL_RUNTIME_APT_DEPS",
)
option_additional_python_deps = click.option(
- '--additional-python-deps',
- help='Additional python dependencies to use when building the images.',
- envvar='ADDITIONAL_PYTHON_DEPS',
+ "--additional-python-deps",
+ help="Additional python dependencies to use when building the images.",
+ envvar="ADDITIONAL_PYTHON_DEPS",
)
option_additional_dev_apt_command = click.option(
- '--additional-dev-apt-command',
- help='Additional command executed before dev apt deps are installed.',
- envvar='ADDITIONAL_DEV_APT_COMMAND',
+ "--additional-dev-apt-command",
+ help="Additional command executed before dev apt deps are installed.",
+ envvar="ADDITIONAL_DEV_APT_COMMAND",
)
option_additional_runtime_apt_command = click.option(
- '--additional-runtime-apt-command',
- help='Additional command executed before runtime apt deps are installed.',
- envvar='ADDITIONAL_RUNTIME_APT_COMMAND',
+ "--additional-runtime-apt-command",
+ help="Additional command executed before runtime apt deps are installed.",
+ envvar="ADDITIONAL_RUNTIME_APT_COMMAND",
)
option_additional_dev_apt_env = click.option(
- '--additional-dev-apt-env',
- help='Additional environment variables set when adding dev dependencies.',
- envvar='ADDITIONAL_DEV_APT_ENV',
+ "--additional-dev-apt-env",
+ help="Additional environment variables set when adding dev dependencies.",
+ envvar="ADDITIONAL_DEV_APT_ENV",
)
option_additional_runtime_apt_env = click.option(
- '--additional-runtime-apt-env',
- help='Additional environment variables set when adding runtime dependencies.',
- envvar='ADDITIONAL_RUNTIME_APT_ENV',
+ "--additional-runtime-apt-env",
+ help="Additional environment variables set when adding runtime dependencies.",
+ envvar="ADDITIONAL_RUNTIME_APT_ENV",
)
option_dev_apt_command = click.option(
- '--dev-apt-command',
- help='Command executed before dev apt deps are installed.',
- envvar='DEV_APT_COMMAND',
+ "--dev-apt-command",
+ help="Command executed before dev apt deps are installed.",
+ envvar="DEV_APT_COMMAND",
)
option_dev_apt_deps = click.option(
- '--dev-apt-deps',
- help='Apt dev dependencies to use when building the images.',
- envvar='DEV_APT_DEPS',
+ "--dev-apt-deps",
+ help="Apt dev dependencies to use when building the images.",
+ envvar="DEV_APT_DEPS",
)
option_runtime_apt_command = click.option(
- '--runtime-apt-command',
- help='Command executed before runtime apt deps are installed.',
- envvar='RUNTIME_APT_COMMAND',
+ "--runtime-apt-command",
+ help="Command executed before runtime apt deps are installed.",
+ envvar="RUNTIME_APT_COMMAND",
)
option_runtime_apt_deps = click.option(
- '--runtime-apt-deps',
- help='Apt runtime dependencies to use when building the images.',
- envvar='RUNTIME_APT_DEPS',
+ "--runtime-apt-deps",
+ help="Apt runtime dependencies to use when building the images.",
+ envvar="RUNTIME_APT_DEPS",
)
option_prepare_buildx_cache = click.option(
- '--prepare-buildx-cache',
- help='Prepares build cache (this is done as separate per-platform steps instead of building the image).',
+ "--prepare-buildx-cache",
+ help="Prepares build cache (this is done as separate per-platform steps instead of building the image).",
is_flag=True,
- envvar='PREPARE_BUILDX_CACHE',
+ envvar="PREPARE_BUILDX_CACHE",
)
option_push = click.option(
- '--push',
- help='Push image after building it.',
+ "--push",
+ help="Push image after building it.",
is_flag=True,
- envvar='PUSH',
+ envvar="PUSH",
)
option_empty_image = click.option(
- '--empty-image',
- help='Prepare empty image tagged with the same name as the Airflow image.',
+ "--empty-image",
+ help="Prepare empty image tagged with the same name as the Airflow image.",
is_flag=True,
- envvar='EMPTY_IMAGE',
+ envvar="EMPTY_IMAGE",
)
option_wait_for_image = click.option(
- '--wait-for-image',
- help='Wait until image is available.',
+ "--wait-for-image",
+ help="Wait until image is available.",
is_flag=True,
- envvar='WAIT_FOR_IMAGE',
+ envvar="WAIT_FOR_IMAGE",
)
option_tag_as_latest = click.option(
- '--tag-as-latest',
- help='Tags the image as latest and updates checksum of all files after pulling. '
- 'Useful when you build or pull image with --image-tag.',
+ "--tag-as-latest",
+ help="Tags the image as latest and update checksum of all files after pulling. "
+ "Useful when you build or pull image with --image-tag.",
is_flag=True,
- envvar='TAG_AS_LATEST',
+ envvar="TAG_AS_LATEST",
)
option_verify = click.option(
- '--verify',
- help='Verify image.',
+ "--verify",
+ help="Verify image.",
is_flag=True,
- envvar='VERIFY',
+ envvar="VERIFY",
)
option_additional_pip_install_flags = click.option(
- '--additional-pip-install-flags',
- help='Additional flags added to `pip install` commands (except reinstalling `pip` itself).',
- envvar='ADDITIONAL_PIP_INSTALL_FLAGS',
+ "--additional-pip-install-flags",
+ help="Additional flags added to `pip install` commands (except reinstalling `pip` itself).",
+ envvar="ADDITIONAL_PIP_INSTALL_FLAGS",
)
option_install_providers_from_sources = click.option(
- '--install-providers-from-sources',
+ "--install-providers-from-sources",
help="Install providers from sources when installing.",
is_flag=True,
- envvar='INSTALL_PROVIDERS_FROM_SOURCES',
+ envvar="INSTALL_PROVIDERS_FROM_SOURCES",
)
option_load_example_dags = click.option(
- '-e',
- '--load-example-dags',
+ "-e",
+ "--load-example-dags",
help="Enable configuration to load example DAGs when starting Airflow.",
is_flag=True,
- envvar='LOAD_EXAMPLES',
+ envvar="LOAD_EXAMPLES",
)
option_load_default_connection = click.option(
- '-c',
- '--load-default-connections',
+ "-c",
+ "--load-default-connections",
help="Enable configuration to load default connections when starting Airflow.",
is_flag=True,
- envvar='LOAD_DEFAULT_CONNECTIONS',
+ envvar="LOAD_DEFAULT_CONNECTIONS",
)
option_version_suffix_for_pypi = click.option(
- '--version-suffix-for-pypi',
- help='Version suffix used for PyPI packages (alpha, beta, rc1, etc.).',
+ "--version-suffix-for-pypi",
+ help="Version suffix used for PyPI packages (alpha, beta, rc1, etc.).",
default="",
- envvar='VERSION_SUFFIX_FOR_PYPI',
+ envvar="VERSION_SUFFIX_FOR_PYPI",
)
option_package_format = click.option(
- '--package-format',
+ "--package-format",
type=BetterChoice(ALLOWED_PACKAGE_FORMATS),
- help='Format of packages.',
+ help="Format of packages.",
default=ALLOWED_PACKAGE_FORMATS[0],
show_default=True,
- envvar='PACKAGE_FORMAT',
+ envvar="PACKAGE_FORMAT",
)
option_installation_package_format = click.option(
- '--package-format',
+ "--package-format",
type=BetterChoice(ALLOWED_INSTALLATION_PACKAGE_FORMATS),
- help='Format of packages that should be installed from dist.',
+ help="Format of packages that should be installed from dist.",
default=ALLOWED_INSTALLATION_PACKAGE_FORMATS[0],
show_default=True,
- envvar='PACKAGE_FORMAT',
+ envvar="PACKAGE_FORMAT",
)
option_python_versions = click.option(
- '--python-versions',
+ "--python-versions",
help="Space separated list of python versions used for build with multiple versions.",
default=" ".join(ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS),
show_default=True,
envvar="PYTHON_VERSIONS",
)
option_run_in_parallel = click.option(
- '--run-in-parallel',
+ "--run-in-parallel",
help="Run the operation in parallel on all or selected subset of Python versions.",
is_flag=True,
- envvar='RUN_IN_PARALLEL',
+ envvar="RUN_IN_PARALLEL",
)
option_parallelism = click.option(
- '--parallelism',
+ "--parallelism",
help="Maximum number of processes to use while running the operation in parallel.",
type=click.IntRange(1, mp.cpu_count() * 2 if not generating_command_images() else 8),
default=mp.cpu_count() if not generating_command_images() else 4,
- envvar='PARALLELISM',
+ envvar="PARALLELISM",
show_default=True,
)
argument_packages = click.argument(
@@ -464,75 +464,75 @@ option_airflow_constraints_reference = click.option(
help="Constraint reference to use. Useful with --use-airflow-version parameter to specify "
"constraints for the installed version and to find newer dependencies",
default=DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH,
- envvar='AIRFLOW_CONSTRAINTS_REFERENCE',
+ envvar="AIRFLOW_CONSTRAINTS_REFERENCE",
)
option_airflow_constraints_reference_build = click.option(
"--airflow-constraints-reference",
default=DEFAULT_AIRFLOW_CONSTRAINTS_BRANCH,
help="Constraint reference to use when building the image.",
- envvar='AIRFLOW_CONSTRAINTS_REFERENCE',
+ envvar="AIRFLOW_CONSTRAINTS_REFERENCE",
)
option_airflow_constraints_mode_ci = click.option(
- '--airflow-constraints-mode',
+ "--airflow-constraints-mode",
type=BetterChoice(ALLOWED_CONSTRAINTS_MODES_CI),
default=ALLOWED_CONSTRAINTS_MODES_CI[0],
show_default=True,
- help='Mode of constraints for CI image building.',
+ help="Mode of constraints for CI image building.",
)
option_airflow_constraints_mode_prod = click.option(
- '--airflow-constraints-mode',
+ "--airflow-constraints-mode",
type=BetterChoice(ALLOWED_CONSTRAINTS_MODES_PROD),
default=ALLOWED_CONSTRAINTS_MODES_PROD[0],
show_default=True,
- help='Mode of constraints for PROD image building.',
+ help="Mode of constraints for PROD image building.",
)
option_pull = click.option(
- '--pull',
+ "--pull",
help="Pull image is missing before attempting to verify it.",
is_flag=True,
- envvar='PULL',
+ envvar="PULL",
)
option_python_image = click.option(
- '--python-image',
+ "--python-image",
help="If specified this is the base python image used to build the image. "
"Should be something like: python:VERSION-slim-bullseye.",
- envvar='PYTHON_IMAGE',
+ envvar="PYTHON_IMAGE",
)
option_builder = click.option(
- '--builder',
+ "--builder",
help="Buildx builder used to perform `docker buildx build` commands.",
- envvar='BUILDER',
- default='default',
+ envvar="BUILDER",
+ default="default",
)
option_include_success_outputs = click.option(
- '--include-success-outputs',
+ "--include-success-outputs",
help="Whether to include outputs of successful parallel runs (skipped by default).",
is_flag=True,
- envvar='INCLUDE_SUCCESS_OUTPUTS',
+ envvar="INCLUDE_SUCCESS_OUTPUTS",
)
option_skip_cleanup = click.option(
- '--skip-cleanup',
+ "--skip-cleanup",
help="Skip cleanup of temporary files created during parallel run.",
is_flag=True,
- envvar='SKIP_CLEANUP',
+ envvar="SKIP_CLEANUP",
)
option_include_mypy_volume = click.option(
- '--include-mypy-volume',
+ "--include-mypy-volume",
help="Whether to include mounting of the mypy volume (useful for debugging mypy).",
is_flag=True,
- envvar='INCLUDE_MYPY_VOLUME',
+ envvar="INCLUDE_MYPY_VOLUME",
)
option_max_time = click.option(
- '--max-time',
+ "--max-time",
help="Maximum time that the command should take - if it takes longer, the command will fail.",
type=click.IntRange(min=1),
- envvar='MAX_TIME',
+ envvar="MAX_TIME",
callback=_set_default_from_parent,
)
option_debug_resources = click.option(
- '--debug-resources',
+ "--debug-resources",
is_flag=True,
help="Whether to show resource information while running in parallel.",
- envvar='DEBUG_RESOURCES',
+ envvar="DEBUG_RESOURCES",
)
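Each of these module-level option objects is a plain click decorator, so commands compose them by stacking. A hedged sketch of the same pattern outside breeze (the command and option here are illustrative):

    import click

    option_verbose = click.option(
        "-v",
        "--verbose",
        is_flag=True,
        help="Print verbose information.",
        envvar="VERBOSE",  # VERBOSE=true acts like passing -v
    )

    @click.command()
    @option_verbose
    def demo(verbose: bool):
        click.echo(f"verbose={verbose}")

    if __name__ == "__main__":
        demo()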
diff --git a/dev/breeze/src/airflow_breeze/utils/confirm.py b/dev/breeze/src/airflow_breeze/utils/confirm.py
index 4186e3b984..6fbcadea5e 100644
--- a/dev/breeze/src/airflow_breeze/utils/confirm.py
+++ b/dev/breeze/src/airflow_breeze/utils/confirm.py
@@ -58,7 +58,7 @@ def user_confirm(
allowed_answers = "y/n/q" if quit_allowed else "y/n"
while True:
try:
- force = forced_answer or os.environ.get('ANSWER')
+ force = forced_answer or os.environ.get("ANSWER")
if force:
user_status = force
print(f"Forced answer for '{message}': {force}")
@@ -72,7 +72,7 @@ def user_confirm(
else:
timeout = None
timeout_answer = ""
- message_prompt = f'\n{message} \nPress {allowed_answers}'
+ message_prompt = f"\n{message} \nPress {allowed_answers}"
if default_answer and timeout:
message_prompt += (
f". Auto-select {timeout_answer} in {timeout} seconds "
@@ -83,16 +83,16 @@ def user_confirm(
prompt=message_prompt,
timeout=timeout,
)
- if user_status == '':
+ if user_status == "":
if default_answer:
return default_answer
else:
continue
- if user_status.upper() in ['Y', 'YES']:
+ if user_status.upper() in ["Y", "YES"]:
return Answer.YES
- elif user_status.upper() in ['N', 'NO']:
+ elif user_status.upper() in ["N", "NO"]:
return Answer.NO
- elif user_status.upper() in ['Q', 'QUIT'] and quit_allowed:
+ elif user_status.upper() in ["Q", "QUIT"] and quit_allowed:
return Answer.QUIT
else:
print(f"Wrong answer given {user_status}. Should be one of {allowed_answers}. Try again.")
diff --git a/dev/breeze/src/airflow_breeze/utils/console.py b/dev/breeze/src/airflow_breeze/utils/console.py
index 368b9bf8bf..72d3cd2f30 100644
--- a/dev/breeze/src/airflow_breeze/utils/console.py
+++ b/dev/breeze/src/airflow_breeze/utils/console.py
@@ -36,7 +36,7 @@ def get_theme() -> Theme:
try:
from airflow_breeze.utils.cache import read_from_cache_file
- if read_from_cache_file('suppress_colour') is not None:
+ if read_from_cache_file("suppress_colour") is not None:
return Theme(
{
"success": "bold italic",
diff --git a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
index df6387af99..ed0ab00db2 100644
--- a/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -122,11 +122,11 @@ def get_extra_docker_flags(mount_sources: str, include_mypy_volume: bool = False
for (src, dst) in VOLUMES_FOR_SELECTED_MOUNTS:
if (AIRFLOW_SOURCES_ROOT / src).exists():
extra_docker_flags.extend(
- ["--mount", f'type=bind,src={AIRFLOW_SOURCES_ROOT / src},dst={dst}']
+ ["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / src},dst={dst}"]
)
if include_mypy_volume:
extra_docker_flags.extend(
- ['--mount', "type=volume,src=mypy-cache-volume,dst=/opt/airflow/.mypy_cache"]
+ ["--mount", "type=volume,src=mypy-cache-volume,dst=/opt/airflow/.mypy_cache"]
)
elif mount_sources == MOUNT_REMOVE:
extra_docker_flags.extend(
@@ -188,13 +188,13 @@ def check_docker_permission_denied(verbose: bool) -> bool:
)
if command_result.returncode != 0:
permission_denied = True
- if command_result.stdout and 'Got permission denied while trying to connect' in command_result.stdout:
+ if command_result.stdout and "Got permission denied while trying to connect" in command_result.stdout:
get_console().print(
- 'ERROR: You have `permission denied` error when trying to communicate with docker.'
+ "ERROR: You have `permission denied` error when trying to communicate with docker."
)
get_console().print(
- 'Most likely you need to add your user to `docker` group: \
- https://docs.docker.com/engine/install/linux-postinstall/ .'
+ "Most likely you need to add your user to `docker` group: \
+ https://docs.docker.com/engine/install/linux-postinstall/ ."
)
return permission_denied
@@ -218,8 +218,8 @@ def check_docker_is_running(verbose: bool):
)
if response.returncode != 0:
get_console().print(
- '[error]Docker is not running.[/]\n'
- '[warning]Please make sure Docker is installed and running.[/]'
+ "[error]Docker is not running.[/]\n"
+ "[warning]Please make sure Docker is installed and running.[/]"
)
sys.exit(1)
@@ -235,8 +235,8 @@ def check_docker_version(verbose: bool):
"""
permission_denied = check_docker_permission_denied(verbose)
if not permission_denied:
- docker_version_command = ['docker', 'version', '--format', '{{.Client.Version}}']
- docker_version = ''
+ docker_version_command = ["docker", "version", "--format", "{{.Client.Version}}"]
+ docker_version = ""
docker_version_result = run_command(
docker_version_command,
verbose=verbose,
@@ -247,7 +247,7 @@ def check_docker_version(verbose: bool):
)
if docker_version_result.returncode == 0:
docker_version = docker_version_result.stdout.strip()
- if docker_version == '':
+ if docker_version == "":
get_console().print(
f"""
[warning]Your version of docker is unknown. If the scripts fail, please make sure to[/]
@@ -257,7 +257,7 @@ def check_docker_version(verbose: bool):
else:
good_version = compare_version(docker_version, MIN_DOCKER_VERSION)
if good_version:
- get_console().print(f'[success]Good version of Docker: {docker_version}.[/]')
+ get_console().print(f"[success]Good version of Docker: {docker_version}.[/]")
else:
get_console().print(
f"""
@@ -278,7 +278,7 @@ def check_docker_compose_version(verbose: bool):
:param verbose: print commands when running
"""
- version_pattern = re.compile(r'(\d+)\.(\d+)\.(\d+)')
+ version_pattern = re.compile(r"(\d+)\.(\d+)\.(\d+)")
docker_compose_version_command = ["docker-compose", "--version"]
try:
docker_compose_version_result = run_command(
@@ -298,15 +298,15 @@ def check_docker_compose_version(verbose: bool):
text=True,
)
DOCKER_COMPOSE_COMMAND.clear()
- DOCKER_COMPOSE_COMMAND.extend(['docker', 'compose'])
+ DOCKER_COMPOSE_COMMAND.extend(["docker", "compose"])
if docker_compose_version_result.returncode == 0:
docker_compose_version = docker_compose_version_result.stdout
version_extracted = version_pattern.search(docker_compose_version)
if version_extracted is not None:
- docker_version = '.'.join(version_extracted.groups())
+ docker_version = ".".join(version_extracted.groups())
good_version = compare_version(docker_version, MIN_DOCKER_COMPOSE_VERSION)
if good_version:
- get_console().print(f'[success]Good version of docker-compose: {docker_version}[/]')
+ get_console().print(f"[success]Good version of docker-compose: {docker_version}[/]")
else:
get_console().print(
f"""
@@ -343,20 +343,20 @@ def check_docker_context(verbose: bool):
)
if response.returncode != 0:
get_console().print(
- '[warning]Could not check for Docker context.[/]\n'
+ "[warning]Could not check for Docker context.[/]\n"
'[warning]Please make sure that Docker is using the right context by running "docker info" and '
- 'checking the active Context.[/]'
+ "checking the active Context.[/]"
)
return
- used_docker_context = response.stdout.strip().replace('"', '')
+ used_docker_context = response.stdout.strip().replace('"', "")
if used_docker_context == expected_docker_context:
- get_console().print(f'[success]Good Docker context used: {used_docker_context}.[/]')
+ get_console().print(f"[success]Good Docker context used: {used_docker_context}.[/]")
else:
get_console().print(
- f'[error]Docker is not using the default context, used context is: {used_docker_context}[/]\n'
- f'[warning]Please make sure Docker is using the {expected_docker_context} context.[/]\n'
+ f"[error]Docker is not using the default context, used context is: {used_docker_context}[/]\n"
+ f"[warning]Please make sure Docker is using the {expected_docker_context} context.[/]\n"
f'[warning]You can try switching contexts by running: "docker context use '
f'{expected_docker_context}"[/]'
)
@@ -365,7 +365,7 @@ def check_docker_context(verbose: bool):
def get_env_variable_value(arg_name: str, params: CommonBuildParams | ShellParams):
raw_value = getattr(params, arg_name, None)
- value = str(raw_value) if raw_value is not None else ''
+ value = str(raw_value) if raw_value is not None else ""
value = "true" if raw_value is True else value
value = "false" if raw_value is False else value
if arg_name == "upgrade_to_newer_dependencies" and value == "true":
@@ -422,7 +422,7 @@ def prepare_docker_build_cache_command(
final_command.extend(arguments)
final_command.extend(["--target", "main", "."])
final_command.extend(
- ["-f", 'Dockerfile' if isinstance(image_params, BuildProdParams) else 'Dockerfile.ci']
+ ["-f", "Dockerfile" if isinstance(image_params, BuildProdParams) else "Dockerfile.ci"]
)
final_command.extend(["--platform", image_params.platform])
final_command.extend(
@@ -483,7 +483,7 @@ def prepare_docker_build_command(
final_command.extend(arguments)
final_command.extend(["-t", image_params.airflow_image_name_with_tag, "--target", "main", "."])
final_command.extend(
- ["-f", 'Dockerfile' if isinstance(image_params, BuildProdParams) else 'Dockerfile.ci']
+ ["-f", "Dockerfile" if isinstance(image_params, BuildProdParams) else "Dockerfile.ci"]
)
final_command.extend(["--platform", image_params.platform])
return final_command
@@ -536,11 +536,11 @@ def build_cache(
def make_sure_builder_configured(params: CommonBuildParams, dry_run: bool, verbose: bool):
- if params.builder != 'default':
- cmd = ['docker', 'buildx', 'inspect', params.builder]
+ if params.builder != "default":
+ cmd = ["docker", "buildx", "inspect", params.builder]
buildx_command_result = run_command(cmd, verbose=verbose, dry_run=dry_run, text=True, check=False)
if buildx_command_result and buildx_command_result.returncode != 0:
- next_cmd = ['docker', 'buildx', 'create', '--name', params.builder]
+ next_cmd = ["docker", "buildx", "create", "--name", params.builder]
run_command(next_cmd, verbose=verbose, text=True, check=False)
@@ -562,46 +562,46 @@ def update_expected_environment_variables(env: dict[str, str]) -> None:
:param env: environment variables to update with missing values if not set.
"""
- set_value_to_default_if_not_set(env, 'AIRFLOW_CONSTRAINTS_MODE', "constraints-source-providers")
- set_value_to_default_if_not_set(env, 'AIRFLOW_CONSTRAINTS_REFERENCE', "constraints-source-providers")
- set_value_to_default_if_not_set(env, 'AIRFLOW_EXTRAS', "")
- set_value_to_default_if_not_set(env, 'ANSWER', "")
- set_value_to_default_if_not_set(env, 'BREEZE', "true")
- set_value_to_default_if_not_set(env, 'BREEZE_INIT_COMMAND', "")
- set_value_to_default_if_not_set(env, 'CI', "false")
- set_value_to_default_if_not_set(env, 'CI_BUILD_ID', "0")
- set_value_to_default_if_not_set(env, 'CI_EVENT_TYPE', "pull_request")
- set_value_to_default_if_not_set(env, 'CI_JOB_ID', "0")
- set_value_to_default_if_not_set(env, 'CI_TARGET_BRANCH', AIRFLOW_BRANCH)
- set_value_to_default_if_not_set(env, 'CI_TARGET_REPO', APACHE_AIRFLOW_GITHUB_REPOSITORY)
- set_value_to_default_if_not_set(env, 'COMMIT_SHA', commit_sha())
- set_value_to_default_if_not_set(env, 'DB_RESET', "false")
- set_value_to_default_if_not_set(env, 'DEFAULT_BRANCH', AIRFLOW_BRANCH)
- set_value_to_default_if_not_set(env, 'ENABLED_SYSTEMS', "")
- set_value_to_default_if_not_set(env, 'ENABLE_TEST_COVERAGE', "false")
- set_value_to_default_if_not_set(env, 'HOST_GROUP_ID', get_host_group_id())
- set_value_to_default_if_not_set(env, 'HOST_OS', get_host_os())
- set_value_to_default_if_not_set(env, 'HOST_USER_ID', get_host_user_id())
- set_value_to_default_if_not_set(env, 'INIT_SCRIPT_FILE', "init.sh")
- set_value_to_default_if_not_set(env, 'INSTALL_PACKAGES_FROM_CONTEXT', "false")
- set_value_to_default_if_not_set(env, 'INSTALL_PROVIDERS_FROM_SOURCES', "true")
- set_value_to_default_if_not_set(env, 'LIST_OF_INTEGRATION_TESTS_TO_RUN', "")
- set_value_to_default_if_not_set(env, 'LOAD_DEFAULT_CONNECTIONS', "false")
- set_value_to_default_if_not_set(env, 'LOAD_EXAMPLES', "false")
- set_value_to_default_if_not_set(env, 'PACKAGE_FORMAT', ALLOWED_PACKAGE_FORMATS[0])
- set_value_to_default_if_not_set(env, 'PYTHONDONTWRITEBYTECODE', "true")
- set_value_to_default_if_not_set(env, 'RUN_SYSTEM_TESTS', "false")
- set_value_to_default_if_not_set(env, 'RUN_TESTS', "false")
- set_value_to_default_if_not_set(env, 'SKIP_ENVIRONMENT_INITIALIZATION', "false")
- set_value_to_default_if_not_set(env, 'SKIP_SSH_SETUP', "false")
- set_value_to_default_if_not_set(env, 'TEST_TYPE', "")
- set_value_to_default_if_not_set(env, 'TEST_TIMEOUT', "60")
- set_value_to_default_if_not_set(env, 'UPGRADE_TO_NEWER_DEPENDENCIES', "false")
- set_value_to_default_if_not_set(env, 'USE_PACKAGES_FROM_DIST', "false")
- set_value_to_default_if_not_set(env, 'VERBOSE', "false")
- set_value_to_default_if_not_set(env, 'VERBOSE_COMMANDS', "false")
- set_value_to_default_if_not_set(env, 'VERSION_SUFFIX_FOR_PYPI', "")
- set_value_to_default_if_not_set(env, 'WHEEL_VERSION', "0.36.2")
+ set_value_to_default_if_not_set(env, "AIRFLOW_CONSTRAINTS_MODE", "constraints-source-providers")
+ set_value_to_default_if_not_set(env, "AIRFLOW_CONSTRAINTS_REFERENCE", "constraints-source-providers")
+ set_value_to_default_if_not_set(env, "AIRFLOW_EXTRAS", "")
+ set_value_to_default_if_not_set(env, "ANSWER", "")
+ set_value_to_default_if_not_set(env, "BREEZE", "true")
+ set_value_to_default_if_not_set(env, "BREEZE_INIT_COMMAND", "")
+ set_value_to_default_if_not_set(env, "CI", "false")
+ set_value_to_default_if_not_set(env, "CI_BUILD_ID", "0")
+ set_value_to_default_if_not_set(env, "CI_EVENT_TYPE", "pull_request")
+ set_value_to_default_if_not_set(env, "CI_JOB_ID", "0")
+ set_value_to_default_if_not_set(env, "CI_TARGET_BRANCH", AIRFLOW_BRANCH)
+ set_value_to_default_if_not_set(env, "CI_TARGET_REPO", APACHE_AIRFLOW_GITHUB_REPOSITORY)
+ set_value_to_default_if_not_set(env, "COMMIT_SHA", commit_sha())
+ set_value_to_default_if_not_set(env, "DB_RESET", "false")
+ set_value_to_default_if_not_set(env, "DEFAULT_BRANCH", AIRFLOW_BRANCH)
+ set_value_to_default_if_not_set(env, "ENABLED_SYSTEMS", "")
+ set_value_to_default_if_not_set(env, "ENABLE_TEST_COVERAGE", "false")
+ set_value_to_default_if_not_set(env, "HOST_GROUP_ID", get_host_group_id())
+ set_value_to_default_if_not_set(env, "HOST_OS", get_host_os())
+ set_value_to_default_if_not_set(env, "HOST_USER_ID", get_host_user_id())
+ set_value_to_default_if_not_set(env, "INIT_SCRIPT_FILE", "init.sh")
+ set_value_to_default_if_not_set(env, "INSTALL_PACKAGES_FROM_CONTEXT", "false")
+ set_value_to_default_if_not_set(env, "INSTALL_PROVIDERS_FROM_SOURCES", "true")
+ set_value_to_default_if_not_set(env, "LIST_OF_INTEGRATION_TESTS_TO_RUN", "")
+ set_value_to_default_if_not_set(env, "LOAD_DEFAULT_CONNECTIONS", "false")
+ set_value_to_default_if_not_set(env, "LOAD_EXAMPLES", "false")
+ set_value_to_default_if_not_set(env, "PACKAGE_FORMAT", ALLOWED_PACKAGE_FORMATS[0])
+ set_value_to_default_if_not_set(env, "PYTHONDONTWRITEBYTECODE", "true")
+ set_value_to_default_if_not_set(env, "RUN_SYSTEM_TESTS", "false")
+ set_value_to_default_if_not_set(env, "RUN_TESTS", "false")
+ set_value_to_default_if_not_set(env, "SKIP_ENVIRONMENT_INITIALIZATION", "false")
+ set_value_to_default_if_not_set(env, "SKIP_SSH_SETUP", "false")
+ set_value_to_default_if_not_set(env, "TEST_TYPE", "")
+ set_value_to_default_if_not_set(env, "TEST_TIMEOUT", "60")
+ set_value_to_default_if_not_set(env, "UPGRADE_TO_NEWER_DEPENDENCIES", "false")
+ set_value_to_default_if_not_set(env, "USE_PACKAGES_FROM_DIST", "false")
+ set_value_to_default_if_not_set(env, "VERBOSE", "false")
+ set_value_to_default_if_not_set(env, "VERBOSE_COMMANDS", "false")
+ set_value_to_default_if_not_set(env, "VERSION_SUFFIX_FOR_PYPI", "")
+ set_value_to_default_if_not_set(env, "WHEEL_VERSION", "0.36.2")
DERIVE_ENV_VARIABLES_FROM_ATTRIBUTES = {
@@ -617,8 +617,8 @@ DERIVE_ENV_VARIABLES_FROM_ATTRIBUTES = {
"ANSWER": "answer",
"BACKEND": "backend",
"COMPOSE_FILE": "compose_file",
- "DB_RESET": 'db_reset',
- "DEV_MODE": 'dev_mode',
+ "DB_RESET": "db_reset",
+ "DEV_MODE": "dev_mode",
"DEFAULT_CONSTRAINTS_BRANCH": "default_constraints_branch",
"ENABLED_INTEGRATIONS": "enabled_integrations",
"GITHUB_ACTIONS": "github_actions",
diff --git a/dev/breeze/src/airflow_breeze/utils/find_newer_dependencies.py b/dev/breeze/src/airflow_breeze/utils/find_newer_dependencies.py
index 1fc3f2000b..1d487a516a 100644
--- a/dev/breeze/src/airflow_breeze/utils/find_newer_dependencies.py
+++ b/dev/breeze/src/airflow_breeze/utils/find_newer_dependencies.py
@@ -105,8 +105,8 @@ def get_releases_and_upload_times(package, min_date, current_version, tz) -> lis
package_info = json.loads(requests.get(f"https://pypi.python.org/pypi/{package}/json").text)
releases: list[tuple[Any, Any]] = []
- for release_version, release_info in package_info['releases'].items():
- if release_info and not release_info[0]['yanked']:
+ for release_version, release_info in package_info["releases"].items():
+ if release_info and not release_info[0]["yanked"]:
parsed_version = version.parse(release_version)
if (
parsed_version.is_prerelease
@@ -114,7 +114,7 @@ def get_releases_and_upload_times(package, min_date, current_version, tz) -> lis
or parsed_version == current_version
):
continue
- upload_date = tz.convert(isoparse(release_info[0]['upload_time_iso_8601'])).replace(microsecond=0)
+ upload_date = tz.convert(isoparse(release_info[0]["upload_time_iso_8601"])).replace(microsecond=0)
if upload_date >= min_date:
releases.append((parsed_version, upload_date))
return releases
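The releases dictionary comes from PyPI's public JSON endpoint, where each release entry carries per-file metadata including the yanked flag and the ISO upload time used above. A stripped-down sketch of the same query (pypi.org and pypi.python.org serve the same API; the package name is illustrative):

    import json

    import requests

    package = "packaging"
    package_info = json.loads(requests.get(f"https://pypi.org/pypi/{package}/json").text)
    for release_version, release_info in package_info["releases"].items():
        if release_info and not release_info[0]["yanked"]:
            print(release_version, release_info[0]["upload_time_iso_8601"])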
diff --git a/dev/breeze/src/airflow_breeze/utils/github_actions.py b/dev/breeze/src/airflow_breeze/utils/github_actions.py
index e96937b100..6b8043aa7e 100644
--- a/dev/breeze/src/airflow_breeze/utils/github_actions.py
+++ b/dev/breeze/src/airflow_breeze/utils/github_actions.py
@@ -24,7 +24,7 @@ from airflow_breeze.utils.console import get_console
def get_ga_output(name: str, value: Any) -> str:
- output_name = name.replace('_', '-')
+ output_name = name.replace("_", "-")
printed_value = str(value).lower() if isinstance(value, bool) else value
get_console().print(f"[info]{output_name}[/] = [green]{escape(str(printed_value))}[/]")
return f"{output_name}={printed_value}"
diff --git a/dev/breeze/src/airflow_breeze/utils/host_info_utils.py b/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
index 39799ab9c9..13e49f7f8f 100644
--- a/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/host_info_utils.py
@@ -33,20 +33,20 @@ class Architecture(Enum):
def get_host_user_id() -> str:
from airflow_breeze.utils.run_utils import run_command
- host_user_id = ''
+ host_user_id = ""
os = get_host_os()
- if os == 'linux' or os == 'darwin':
- host_user_id = run_command(cmd=['id', '-ur'], capture_output=True, text=True).stdout.strip()
+ if os == "linux" or os == "darwin":
+ host_user_id = run_command(cmd=["id", "-ur"], capture_output=True, text=True).stdout.strip()
return host_user_id
def get_host_group_id() -> str:
from airflow_breeze.utils.run_utils import run_command
- host_group_id = ''
+ host_group_id = ""
os = get_host_os()
- if os == 'linux' or os == 'darwin':
- host_group_id = run_command(cmd=['id', '-gr'], capture_output=True, text=True).stdout.strip()
+ if os == "linux" or os == "darwin":
+ host_group_id = run_command(cmd=["id", "-gr"], capture_output=True, text=True).stdout.strip()
return host_group_id
diff --git a/dev/breeze/src/airflow_breeze/utils/image.py b/dev/breeze/src/airflow_breeze/utils/image.py
index 15ed2b9c08..bf7bf09437 100644
--- a/dev/breeze/src/airflow_breeze/utils/image.py
+++ b/dev/breeze/src/airflow_breeze/utils/image.py
@@ -84,7 +84,7 @@ def run_pull_in_parallel(
"verbose": verbose,
}
if verify:
- d['extra_pytest_args'] = extra_pytest_args
+ d["extra_pytest_args"] = extra_pytest_args
return d
results = [
diff --git a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py
index 0e0ac16e05..4f4d63dab2 100644
--- a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py
@@ -82,9 +82,9 @@ def get_kind_cluster_config_path(python: str, kubernetes_version: str) -> Path:
def get_architecture_string_for_urls() -> str:
architecture, machine = get_host_architecture()
if architecture == Architecture.X86_64:
- return 'amd64'
+ return "amd64"
if architecture == Architecture.ARM:
- return 'arm64'
+ return "arm64"
raise Exception(f"The architecture {architecture} is not supported when downloading kubernetes tools!")
@@ -246,8 +246,8 @@ def _check_architecture_supported():
architecture, machine = get_host_architecture()
if architecture not in ALLOWED_ARCHITECTURES:
get_console().print(
- f'[error]The {architecture} is not one '
- f'of the supported: {ALLOWED_ARCHITECTURES}. The original machine: {machine}'
+ f"[error]The {architecture} is not one "
+ f"of the supported: {ALLOWED_ARCHITECTURES}. The original machine: {machine}"
)
sys.exit(1)
@@ -265,7 +265,7 @@ def make_sure_kubernetes_tools_are_installed(verbose: bool, dry_run: bool):
_download_kubectl_if_needed(verbose=verbose, dry_run=dry_run)
_download_helm_if_needed(verbose=verbose, dry_run=dry_run)
new_env = os.environ.copy()
- new_env['PATH'] = str(K8S_BIN_BASE_PATH) + os.pathsep + new_env['PATH']
+ new_env["PATH"] = str(K8S_BIN_BASE_PATH) + os.pathsep + new_env["PATH"]
result = run_command(
["helm", "repo", "list"],
verbose=verbose,
@@ -291,15 +291,15 @@ def make_sure_kubernetes_tools_are_installed(verbose: bool, dry_run: bool):
def _requirements_changed() -> bool:
if not CACHED_K8S_REQUIREMENTS.exists():
get_console().print(
- f'\n[warning]The K8S venv in {K8S_ENV_PATH} has never been created. Installing it.\n'
+ f"\n[warning]The K8S venv in {K8S_ENV_PATH}. has never been created. Installing it.\n"
)
return True
requirements_file_content = K8S_REQUIREMENTS.read_text()
cached_requirements_content = CACHED_K8S_REQUIREMENTS.read_text()
if cached_requirements_content != requirements_file_content:
get_console().print(
- f'\n[warning]Requirements changed for the K8S venv in {K8S_ENV_PATH}. '
- f'Reinstalling the venv.\n'
+ f"\n[warning]Requirements changed for the K8S venv in {K8S_ENV_PATH}. "
+ f"Reinstalling the venv.\n"
)
return True
return False
@@ -331,8 +331,8 @@ def _install_packages_in_k8s_virtualenv(dry_run: bool, verbose: bool, with_const
)
if install_packages_result.returncode != 0:
get_console().print(
- f'[error]Error when updating pip to {PIP_VERSION}:[/]\n'
- f'{install_packages_result.stdout}\n{install_packages_result.stderr}'
+ f"[error]Error when updating pip to {PIP_VERSION}:[/]\n"
+ f"{install_packages_result.stdout}\n{install_packages_result.stderr}"
)
return install_packages_result
@@ -349,14 +349,14 @@ def create_virtualenv(force_venv_setup: bool, verbose: bool, dry_run: bool) -> R
capture_output=True,
)
if python_command_result.returncode == 0:
- get_console().print(f'[success]K8S Virtualenv is initialized in {K8S_ENV_PATH}')
+ get_console().print(f"[success]K8S Virtualenv is initialized in {K8S_ENV_PATH}")
return python_command_result
except FileNotFoundError:
pass
if force_venv_setup:
- get_console().print(f'[info]Forcing initializing K8S virtualenv in {K8S_ENV_PATH}')
+ get_console().print(f"[info]Forcing initializing K8S virtualenv in {K8S_ENV_PATH}")
else:
- get_console().print(f'[info]Initializing K8S virtualenv in {K8S_ENV_PATH}')
+ get_console().print(f"[info]Initializing K8S virtualenv in {K8S_ENV_PATH}")
shutil.rmtree(K8S_ENV_PATH, ignore_errors=True)
venv_command_result = run_command(
[sys.executable, "-m", "venv", str(K8S_ENV_PATH)],
@@ -367,11 +367,11 @@ def create_virtualenv(force_venv_setup: bool, verbose: bool, dry_run: bool) -> R
)
if venv_command_result.returncode != 0:
get_console().print(
- f'[error]Error when initializing K8S virtualenv in {K8S_ENV_PATH}:[/]\n'
- f'{venv_command_result.stdout}\n{venv_command_result.stderr}'
+ f"[error]Error when initializing K8S virtualenv in {K8S_ENV_PATH}:[/]\n"
+ f"{venv_command_result.stdout}\n{venv_command_result.stderr}"
)
return venv_command_result
- get_console().print(f'[info]Reinstalling PIP version in {K8S_ENV_PATH}')
+ get_console().print(f"[info]Reinstalling PIP version in {K8S_ENV_PATH}")
pip_reinstall_result = run_command(
[str(PYTHON_BIN_PATH), "-m", "pip", "install", f"pip=={PIP_VERSION}"],
verbose=verbose,
@@ -381,11 +381,11 @@ def create_virtualenv(force_venv_setup: bool, verbose: bool, dry_run: bool) -> R
)
if pip_reinstall_result.returncode != 0:
get_console().print(
- f'[error]Error when updating pip to {PIP_VERSION}:[/]\n'
- f'{pip_reinstall_result.stdout}\n{pip_reinstall_result.stderr}'
+ f"[error]Error when updating pip to {PIP_VERSION}:[/]\n"
+ f"{pip_reinstall_result.stdout}\n{pip_reinstall_result.stderr}"
)
return pip_reinstall_result
- get_console().print(f'[info]Installing necessary packages in {K8S_ENV_PATH}')
+ get_console().print(f"[info]Installing necessary packages in {K8S_ENV_PATH}")
install_packages_result = _install_packages_in_k8s_virtualenv(
dry_run=dry_run, verbose=verbose, with_constraints=True
@@ -431,19 +431,19 @@ def run_command_with_k8s_env(
def get_k8s_env(python: str, kubernetes_version: str, executor: str | None = None) -> dict[str, str]:
new_env = os.environ.copy()
- new_env['PATH'] = str(K8S_BIN_BASE_PATH) + os.pathsep + new_env['PATH']
- new_env['KUBECONFIG'] = str(get_kubeconfig_file(python=python, kubernetes_version=kubernetes_version))
- new_env['KINDCONFIG'] = str(
+ new_env["PATH"] = str(K8S_BIN_BASE_PATH) + os.pathsep + new_env["PATH"]
+ new_env["KUBECONFIG"] = str(get_kubeconfig_file(python=python, kubernetes_version=kubernetes_version))
+ new_env["KINDCONFIG"] = str(
get_kind_cluster_config_path(python=python, kubernetes_version=kubernetes_version)
)
api_server_port, web_server_port = _get_kubernetes_port_numbers(
python=python, kubernetes_version=kubernetes_version
)
- new_env['CLUSTER_FORWARDED_PORT'] = str(web_server_port)
+ new_env["CLUSTER_FORWARDED_PORT"] = str(web_server_port)
kubectl_cluster_name = get_kubectl_cluster_name(python=python, kubernetes_version=kubernetes_version)
if executor:
- new_env['PS1'] = f"({kubectl_cluster_name}:{executor})> "
- new_env['EXECUTOR'] = executor
+ new_env["PS1"] = f"({kubectl_cluster_name}:{executor})> "
+ new_env["EXECUTOR"] = executor
return new_env
@@ -505,8 +505,8 @@ def _get_kubernetes_port_numbers(python: str, kubernetes_version: str) -> tuple[
conf = _get_kind_cluster_config_content(python=python, kubernetes_version=kubernetes_version)
if conf is None:
return 0, 0
- api_server_port = conf['networking']['apiServerPort']
- web_server_port = conf['nodes'][1]['extraPortMappings'][0]['hostPort']
+ api_server_port = conf["networking"]["apiServerPort"]
+ web_server_port = conf["nodes"][1]["extraPortMappings"][0]["hostPort"]
return api_server_port, web_server_port
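A recurring trick in this file is prepending the downloaded tool directory to PATH so the pinned kind/kubectl/helm binaries shadow any system-wide ones. The same copy-and-prepend in isolation (the directory here is illustrative; breeze keeps it under its own cache dir):

    import os

    K8S_BIN_BASE_PATH = "/tmp/k8s-bin"
    new_env = os.environ.copy()
    new_env["PATH"] = K8S_BIN_BASE_PATH + os.pathsep + new_env["PATH"]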
diff --git a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
index df4f50431c..0d9464c259 100644
--- a/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
+++ b/dev/breeze/src/airflow_breeze/utils/md5_build_check.py
@@ -77,8 +77,8 @@ def calculate_md5_checksum_for_files(
md5_checksum = generate_md5(file_to_get_md5)
sub_dir_name = file_to_get_md5.parts[-2]
actual_file_name = file_to_get_md5.parts[-1]
- cache_file_name = Path(md5sum_cache_dir, sub_dir_name + '-' + actual_file_name + '.md5sum')
- file_content = md5_checksum + ' ' + str(file_to_get_md5) + '\n'
+ cache_file_name = Path(md5sum_cache_dir, sub_dir_name + "-" + actual_file_name + ".md5sum")
+ file_content = md5_checksum + " " + str(file_to_get_md5) + "\n"
is_modified = check_md5checksum_in_cache_modified(file_content, cache_file_name, update=update)
if is_modified:
modified_files.append(calculate_md5_file)
@@ -99,17 +99,17 @@ def md5sum_check_if_build_is_needed(md5sum_cache_dir: Path) -> bool:
modified_files, not_modified_files = calculate_md5_checksum_for_files(md5sum_cache_dir, update=False)
if len(modified_files) > 0:
get_console().print(
- f'[warning]The following important files are modified in {AIRFLOW_SOURCES_ROOT} '
- f'since last time image was built: [/]\n\n'
+ f"[warning]The following important files are modified in {AIRFLOW_SOURCES_ROOT} "
+ f"since last time image was built: [/]\n\n"
)
for file in modified_files:
get_console().print(f" * [info]{file}[/]")
- get_console().print('\n[warning]Likely CI image needs rebuild[/]\n')
+ get_console().print("\n[warning]Likely CI image needs rebuild[/]\n")
build_needed = True
else:
get_console().print(
- '[info]Docker image build is not needed for CI build as no important files are changed! '
- 'You can add --force-build to force it[/]'
+ "[info]Docker image build is not needed for CI build as no important files are changed! "
+ "You can add --force-build to force it[/]"
)
return build_needed
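The md5 build check boils down to: hash each watched file, compare against the hash recorded at the last successful build, and rebuild if any differ. A self-contained sketch of that cache comparison (simplified relative to calculate_md5_checksum_for_files above):

    import hashlib
    from pathlib import Path

    def generate_md5(path: Path) -> str:
        return hashlib.md5(path.read_bytes()).hexdigest()

    def is_modified(tracked: Path, cache_file: Path, update: bool = False) -> bool:
        file_content = generate_md5(tracked) + " " + str(tracked) + "\n"
        modified = not cache_file.exists() or cache_file.read_text() != file_content
        if modified and update:
            cache_file.parent.mkdir(parents=True, exist_ok=True)
            cache_file.write_text(file_content)
        return modified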
diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py
index 1141f44013..7ee2d6c2a8 100644
--- a/dev/breeze/src/airflow_breeze/utils/parallel.py
+++ b/dev/breeze/src/airflow_breeze/utils/parallel.py
@@ -56,17 +56,17 @@ def get_output_files(titles: list[str]) -> list[Output]:
def nice_timedelta(delta: datetime.timedelta):
- d = {'d': delta.days}
- d['h'], rem = divmod(delta.seconds, 3600)
- d['m'], d['s'] = divmod(rem, 60)
- return "{d} days {h:02}:{m:02}:{s:02}".format(**d) if d['d'] else "{h:02}:{m:02}:{s:02}".format(**d)
+ d = {"d": delta.days}
+ d["h"], rem = divmod(delta.seconds, 3600)
+ d["m"], d["s"] = divmod(rem, 60)
+ return "{d} days {h:02}:{m:02}:{s:02}".format(**d) if d["d"] else "{h:02}:{m:02}:{s:02}".format(**d)
-ANSI_COLOUR_MATCHER = re.compile(r'(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]')
+ANSI_COLOUR_MATCHER = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]")
def remove_ansi_colours(line):
- return ANSI_COLOUR_MATCHER.sub('', line)
+ return ANSI_COLOUR_MATCHER.sub("", line)
def get_last_lines_of_file(file_name: str, num_lines: int = 2) -> tuple[list[str], list[str]]:
@@ -84,7 +84,7 @@ def get_last_lines_of_file(file_name: str, num_lines: int = 2) -> tuple[list[str
seek_size = min(os.stat(file_name).st_size, max_read)
except FileNotFoundError:
return [], []
- with open(file_name, 'rb') as temp_f:
+ with open(file_name, "rb") as temp_f:
temp_f.seek(-seek_size, os.SEEK_END)
tail = temp_f.read().decode(errors="ignore")
last_lines = tail.splitlines()[-num_lines:]
@@ -102,7 +102,7 @@ class AbstractProgressInfoMatcher(metaclass=ABCMeta):
class DockerBuildxProgressMatcher(AbstractProgressInfoMatcher):
- DOCKER_BUILDX_PROGRESS_MATCHER = re.compile(r'\s*#(\d*) ')
+ DOCKER_BUILDX_PROGRESS_MATCHER = re.compile(r"\s*#(\d*) ")
def __init__(self):
self.last_docker_build_lines: dict[str, str] = {}
@@ -176,23 +176,23 @@ class GenericRegexpProgressMatcher(AbstractProgressInfoMatcher):
return [last_match]
-DOCKER_PULL_PROGRESS_REGEXP = r'^[0-9a-f]+: .*|.*\[[ \d%]*\].*|^Waiting'
+DOCKER_PULL_PROGRESS_REGEXP = r"^[0-9a-f]+: .*|.*\[[ \d%]*\].*|^Waiting"
def bytes2human(n):
- symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y")
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i + 1) * 10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
- return f'{value:.1f}{s}'
+ return f"{value:.1f}{s}"
return f"{n}B"
def get_printable_value(key: str, value: Any) -> str:
- if key == 'percent':
+ if key == "percent":
return f"{value} %"
if isinstance(value, (int, float)):
return bytes2human(value)
@@ -227,21 +227,21 @@ def get_multi_tuple_array(title: str, tuples: list[tuple[NamedTuple, ...]]) -> T
IGNORED_FSTYPES = [
- 'autofs',
- 'bps',
- 'cgroup',
- 'cgroup2',
- 'configfs',
- 'debugfs',
- 'devpts',
- 'fusectl',
- 'mqueue',
- 'nsfs',
- 'overlay',
- 'proc',
- 'pstore',
- 'squashfs',
- 'tracefs',
+ "autofs",
+ "bps",
+ "cgroup",
+ "cgroup2",
+ "configfs",
+ "debugfs",
+ "devpts",
+ "fusectl",
+ "mqueue",
+ "nsfs",
+ "overlay",
+ "proc",
+ "pstore",
+ "squashfs",
+ "tracefs",
]
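Two of the helpers touched in this file are easy to sanity-check in isolation; a short demo using the same pattern as ANSI_COLOUR_MATCHER above:

    import re

    ANSI = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]")
    assert ANSI.sub("", "\x1b[31mwarning\x1b[0m") == "warning"  # colour codes stripped

    # bytes2human picks the largest binary prefix that fits:
    # 1 << 20 bytes is 1 MiB, so 2_500_000 -> "2.4M" and 900 -> "900B".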
diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py b/dev/breeze/src/airflow_breeze/utils/path_utils.py
index 5ebf11cc31..e6b19fdcfe 100644
--- a/dev/breeze/src/airflow_breeze/utils/path_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -59,11 +59,11 @@ def in_help() -> bool:
def skip_upgrade_check():
- return in_self_upgrade() or in_autocomplete() or in_help() or hasattr(sys, '_called_from_test')
+ return in_self_upgrade() or in_autocomplete() or in_help() or hasattr(sys, "_called_from_test")
def skip_group_output():
- return in_autocomplete() or in_help() or os.environ.get('SKIP_GROUP_OUTPUT') is not None
+ return in_autocomplete() or in_help() or os.environ.get("SKIP_GROUP_OUTPUT") is not None
def get_package_setup_metadata_hash() -> str:
@@ -83,7 +83,7 @@ def get_package_setup_metadata_hash() -> str:
prefix = "Package config hash: "
- for line in distribution('apache-airflow-breeze').metadata.as_string().splitlines(keepends=False):
+ for line in distribution("apache-airflow-breeze").metadata.as_string().splitlines(keepends=False):
if line.startswith(prefix):
return line[len(prefix) :]
return "NOT FOUND"
@@ -124,11 +124,11 @@ def get_used_sources_setup_metadata_hash() -> str:
def set_forced_answer_for_upgrade_check():
"""When we run upgrade check --answer is not parsed yet, so we need to guess it."""
- if "--answer n" in " ".join(sys.argv).lower() or os.environ.get('ANSWER', '').lower().startswith("n"):
+ if "--answer n" in " ".join(sys.argv).lower() or os.environ.get("ANSWER", "").lower().startswith("n"):
set_forced_answer("no")
- if "--answer y" in " ".join(sys.argv).lower() or os.environ.get('ANSWER', '').lower().startswith("y"):
+ if "--answer y" in " ".join(sys.argv).lower() or os.environ.get("ANSWER", "").lower().startswith("y"):
set_forced_answer("yes")
- if "--answer q" in " ".join(sys.argv).lower() or os.environ.get('ANSWER', '').lower().startswith("q"):
+ if "--answer q" in " ".join(sys.argv).lower() or os.environ.get("ANSWER", "").lower().startswith("q"):
set_forced_answer("quit")
@@ -221,7 +221,7 @@ def find_airflow_sources_root_to_operate_on() -> Path:
:return: Path for the found sources.
"""
- sources_root_from_env = os.getenv('AIRFLOW_SOURCES_ROOT', None)
+ sources_root_from_env = os.getenv("AIRFLOW_SOURCES_ROOT", None)
if sources_root_from_env:
return Path(sources_root_from_env)
installation_airflow_sources = get_installation_airflow_sources()
@@ -242,17 +242,17 @@ def find_airflow_sources_root_to_operate_on() -> Path:
AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on().resolve()
-BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / '.build'
-DAGS_DIR = AIRFLOW_SOURCES_ROOT / 'dags'
-FILES_DIR = AIRFLOW_SOURCES_ROOT / 'files'
-HOOKS_DIR = AIRFLOW_SOURCES_ROOT / 'hooks'
+BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / ".build"
+DAGS_DIR = AIRFLOW_SOURCES_ROOT / "dags"
+FILES_DIR = AIRFLOW_SOURCES_ROOT / "files"
+HOOKS_DIR = AIRFLOW_SOURCES_ROOT / "hooks"
KUBE_DIR = AIRFLOW_SOURCES_ROOT / ".kube"
-LOGS_DIR = AIRFLOW_SOURCES_ROOT / 'logs'
-DIST_DIR = AIRFLOW_SOURCES_ROOT / 'dist'
-SCRIPTS_CI_DIR = AIRFLOW_SOURCES_ROOT / 'scripts' / 'ci'
-DOCKER_CONTEXT_DIR = AIRFLOW_SOURCES_ROOT / 'docker-context-files'
+LOGS_DIR = AIRFLOW_SOURCES_ROOT / "logs"
+DIST_DIR = AIRFLOW_SOURCES_ROOT / "dist"
+SCRIPTS_CI_DIR = AIRFLOW_SOURCES_ROOT / "scripts" / "ci"
+DOCKER_CONTEXT_DIR = AIRFLOW_SOURCES_ROOT / "docker-context-files"
CACHE_TMP_FILE_DIR = tempfile.TemporaryDirectory()
-OUTPUT_LOG = Path(CACHE_TMP_FILE_DIR.name, 'out.log')
+OUTPUT_LOG = Path(CACHE_TMP_FILE_DIR.name, "out.log")
BREEZE_SOURCES_ROOT = AIRFLOW_SOURCES_ROOT / "dev" / "breeze"
MSSQL_TMP_DIR_NAME = ".tmp-mssql"
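The AIRFLOW_SOURCES_ROOT lookup normalized above is a plain environment override in front of the auto-discovery; a minimal sketch (sources_root is a hypothetical name, and the fallback here is simply the current directory):

    import os
    from pathlib import Path

    def sources_root(default: Path) -> Path:
        override = os.getenv("AIRFLOW_SOURCES_ROOT")  # explicit override wins
        return Path(override) if override else default

    print(sources_root(Path.cwd()))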
diff --git a/dev/breeze/src/airflow_breeze/utils/recording.py b/dev/breeze/src/airflow_breeze/utils/recording.py
index b193edde90..f53ef8ce92 100644
--- a/dev/breeze/src/airflow_breeze/utils/recording.py
+++ b/dev/breeze/src/airflow_breeze/utils/recording.py
@@ -31,7 +31,7 @@ DEFAULT_COLUMNS = 129
def generating_command_images() -> bool:
- return 'RECORD_BREEZE_TITLE' in os.environ or "regenerate-command-images" in sys.argv
+ return "RECORD_BREEZE_TITLE" in os.environ or "regenerate-command-images" in sys.argv
def enable_recording_of_help_output(path: str, title: str | None, width: str | None):
@@ -64,14 +64,14 @@ def enable_recording_of_help_output(path: str, title: str | None, width: str | N
click.rich_click.Console = RecordingConsole # type: ignore[misc]
-output_file = os.environ.get('RECORD_BREEZE_OUTPUT_FILE')
+output_file = os.environ.get("RECORD_BREEZE_OUTPUT_FILE")
if output_file and not in_autocomplete():
enable_recording_of_help_output(
path=output_file,
- title=os.environ.get('RECORD_BREEZE_TITLE'),
- width=os.environ.get('RECORD_BREEZE_WIDTH'),
+ title=os.environ.get("RECORD_BREEZE_TITLE"),
+ width=os.environ.get("RECORD_BREEZE_WIDTH"),
)
else:
try:
diff --git a/dev/breeze/src/airflow_breeze/utils/registry.py b/dev/breeze/src/airflow_breeze/utils/registry.py
index 79784fb499..903a5292bd 100644
--- a/dev/breeze/src/airflow_breeze/utils/registry.py
+++ b/dev/breeze/src/airflow_breeze/utils/registry.py
@@ -41,7 +41,7 @@ def login_to_github_docker_registry(
)
elif len(image_params.github_token) > 0:
run_command(
- ['docker', 'logout', 'ghcr.io'],
+ ["docker", "logout", "ghcr.io"],
dry_run=dry_run,
verbose=verbose,
output=output,
@@ -50,12 +50,12 @@ def login_to_github_docker_registry(
)
command_result = run_command(
[
- 'docker',
- 'login',
- '--username',
+ "docker",
+ "login",
+ "--username",
image_params.github_username,
- '--password-stdin',
- 'ghcr.io',
+ "--password-stdin",
+ "ghcr.io",
],
verbose=verbose,
output=output,
@@ -65,5 +65,5 @@ def login_to_github_docker_registry(
)
return command_result.returncode, "Docker login"
else:
- get_console().print('\n[info]Skip Login to GitHub Container Registry as token is missing')
+ get_console().print("\n[info]Skip Login to GitHub Container Registry as token is missing")
return 0, "Docker login skipped"
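The login command normalized above relies on docker's --password-stdin, which keeps the token out of argv, process listings, and shell history. A minimal sketch, assuming the docker CLI is on PATH (ghcr_login is a hypothetical name):

    import subprocess

    def ghcr_login(username: str, token: str) -> int:
        # The token travels over stdin, not the command line.
        result = subprocess.run(
            ["docker", "login", "--username", username, "--password-stdin", "ghcr.io"],
            input=token,
            text=True,
        )
        return result.returncode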
diff --git a/dev/breeze/src/airflow_breeze/utils/reinstall.py b/dev/breeze/src/airflow_breeze/utils/reinstall.py
index dc01332dab..6c6b563553 100644
--- a/dev/breeze/src/airflow_breeze/utils/reinstall.py
+++ b/dev/breeze/src/airflow_breeze/utils/reinstall.py
@@ -37,7 +37,7 @@ def reinstall_breeze(breeze_sources: Path, re_run: bool = True):
get_console().print(f"\n[info]Reinstalling Breeze from {breeze_sources}\n")
subprocess.check_call(["pipx", "install", "-e", str(breeze_sources), "--force"])
if re_run:
- os.execl(sys.executable, 'breeze', *sys.argv)
+ os.execl(sys.executable, "breeze", *sys.argv)
get_console().print(f"\n[info]Breeze has been reinstalled from {breeze_sources}. Exiting now.[/]\n\n")
sys.exit(0)
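The re-run above uses os.execl, which replaces the current process image in place: nothing after the call executes, and the second argument becomes argv[0] of the new program. A guarded toy demo (the RUN_EXEC_DEMO flag is only there so the example is safe to run):

    import os
    import sys

    if os.environ.get("RUN_EXEC_DEMO"):
        # Replaces this interpreter with a fresh one; this line never returns.
        os.execl(sys.executable, "demo", "-c", "print('re-executed')")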
diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py b/dev/breeze/src/airflow_breeze/utils/run_tests.py
index 727d1685f7..680fcc6db5 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_tests.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py
@@ -47,14 +47,14 @@ def verify_an_image(
)
return command_result.returncode, f"Testing {image_type} python {image_name}"
pytest_args = ("-n", str(os.cpu_count()), "--color=yes")
- if image_type == 'PROD':
+ if image_type == "PROD":
test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_prod_image.py"
else:
test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_ci_image.py"
env = os.environ.copy()
- env['DOCKER_IMAGE'] = image_name
+ env["DOCKER_IMAGE"] = image_name
if slim_image:
- env['TEST_SLIM_IMAGE'] = 'true'
+ env["TEST_SLIM_IMAGE"] = "true"
command_result = run_command(
[sys.executable, "-m", "pytest", str(test_path), *pytest_args, *extra_pytest_args],
dry_run=dry_run,
@@ -78,7 +78,7 @@ def run_docker_compose_tests(
pytest_args = ("-n", str(os.cpu_count()), "--color=yes")
test_path = AIRFLOW_SOURCES_ROOT / "docker_tests" / "test_docker_compose_quick_start.py"
env = os.environ.copy()
- env['DOCKER_IMAGE'] = image_name
+ env["DOCKER_IMAGE"] = image_name
command_result = run_command(
[sys.executable, "-m", "pytest", str(test_path), *pytest_args, *extra_pytest_args],
dry_run=dry_run,
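Both test runners in this file follow the same shape: copy the environment, point DOCKER_IMAGE at the image under test, and shell out to pytest. A sketch under stated assumptions (the image tag is made up, and -n requires the pytest-xdist plugin):

    import os
    import subprocess
    import sys

    env = os.environ.copy()
    env["DOCKER_IMAGE"] = "ghcr.io/apache/airflow/main/ci/python3.7"  # hypothetical tag
    subprocess.run(
        [sys.executable, "-m", "pytest", "docker_tests/test_ci_image.py",
         "-n", str(os.cpu_count()), "--color=yes"],
        env=env,
        check=False,
    )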
diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py
index 760d08bab9..5268eef142 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py
@@ -85,7 +85,7 @@ def run_command(
if _index == 0:
# First argument is always passed
return False
- if _arg.startswith('-'):
+ if _arg.startswith("-"):
return True
if len(_arg) == 0:
return True
@@ -111,17 +111,17 @@ def run_command(
]
# Heuristics to get a (possibly) short but explanatory title showing what the command does
# If title is not provided explicitly
- title = "<" + ' '.join(shortened_command[:5]) + ">" # max 4 args
+ title = "<" + " ".join(shortened_command[:5]) + ">" # max 4 args
workdir: str = str(cwd) if cwd else os.getcwd()
cmd_env = os.environ.copy()
cmd_env.setdefault("HOME", str(Path.home()))
if env:
cmd_env.update(env)
if output:
- if 'capture_output' not in kwargs or not kwargs['capture_output']:
- kwargs['stdout'] = output.file
- kwargs['stderr'] = subprocess.STDOUT
- command_to_print = ' '.join(shlex.quote(c) for c in cmd)
+ if "capture_output" not in kwargs or not kwargs["capture_output"]:
+ kwargs["stdout"] = output.file
+ kwargs["stderr"] = subprocess.STDOUT
+ command_to_print = " ".join(shlex.quote(c) for c in cmd)
env_to_print = get_environments_to_print(env)
if not verbose and not dry_run:
return subprocess.run(cmd, input=input, check=check, env=cmd_env, cwd=workdir, **kwargs)
@@ -173,10 +173,10 @@ def get_environments_to_print(env: Mapping[str, str] | None):
system_env[key] = val
else:
my_env[key] = val
- env_to_print = ''.join(f'{key}="{val}" \\\n' for (key, val) in sorted(system_env.items()))
+ env_to_print = "".join(f'{key}="{val}" \\\n' for (key, val) in sorted(system_env.items()))
env_to_print += r"""\
"""
- env_to_print += ''.join(f'{key}="{val}" \\\n' for (key, val) in sorted(my_env.items()))
+ env_to_print += "".join(f'{key}="{val}" \\\n' for (key, val) in sorted(my_env.items()))
return env_to_print
@@ -237,7 +237,7 @@ def get_filesystem_type(filepath: str):
root_type = "unknown"
for part in psutil.disk_partitions(all=True):
- if part.mountpoint == '/':
+ if part.mountpoint == "/":
root_type = part.fstype
continue
if filepath.startswith(part.mountpoint):
@@ -248,7 +248,7 @@ def get_filesystem_type(filepath: str):
def instruct_build_image(python: str):
"""Print instructions to the user that they should build the image"""
- get_console().print(f'[warning]\nThe CI image for Python version {python} may be outdated[/]\n')
+ get_console().print(f"[warning]\nThe CI image for Python version {python} may be outdated[/]\n")
get_console().print(
f"\n[info]Please run at the earliest "
f"convenience:[/]\n\nbreeze ci-image build --python {python}\n\n"
@@ -289,16 +289,16 @@ def fix_group_permissions(verbose: bool):
"""Fixes permissions of all the files and directories that have group-write access."""
if verbose:
get_console().print("[info]Fixing group permissions[/]")
- files_to_fix_result = run_command(['git', 'ls-files', './'], capture_output=True, text=True)
+ files_to_fix_result = run_command(["git", "ls-files", "./"], capture_output=True, text=True)
if files_to_fix_result.returncode == 0:
- files_to_fix = files_to_fix_result.stdout.strip().split('\n')
+ files_to_fix = files_to_fix_result.stdout.strip().split("\n")
for file_to_fix in files_to_fix:
change_file_permission(Path(file_to_fix))
directories_to_fix_result = run_command(
- ['git', 'ls-tree', '-r', '-d', '--name-only', 'HEAD'], capture_output=True, text=True
+ ["git", "ls-tree", "-r", "-d", "--name-only", "HEAD"], capture_output=True, text=True
)
if directories_to_fix_result.returncode == 0:
- directories_to_fix = directories_to_fix_result.stdout.strip().split('\n')
+ directories_to_fix = directories_to_fix_result.stdout.strip().split("\n")
for directory_to_fix in directories_to_fix:
change_directory_permission(Path(directory_to_fix))
@@ -312,7 +312,7 @@ def is_repo_rebased(repo: str, branch: str):
headers_dict = {"Accept": "application/vnd.github.VERSION.sha"}
latest_sha = requests.get(gh_url, headers=headers_dict).text.strip()
rebased = False
- command_result = run_command(['git', 'log', '--format=format:%H'], capture_output=True, text=True)
+ command_result = run_command(["git", "log", "--format=format:%H"], capture_output=True, text=True)
commit_list = command_result.stdout.strip().splitlines() if command_result is not None else "missing"
if latest_sha in commit_list:
rebased = True
@@ -325,7 +325,7 @@ def check_if_buildx_plugin_installed(verbose: bool) -> bool:
:param verbose: print commands when running
:return True if the buildx plugin is installed.
"""
- check_buildx = ['docker', 'buildx', 'version']
+ check_buildx = ["docker", "buildx", "version"]
docker_buildx_version_result = run_command(
check_buildx,
verbose=verbose,
@@ -342,7 +342,7 @@ def check_if_buildx_plugin_installed(verbose: bool) -> bool:
@lru_cache(maxsize=None)
def commit_sha():
"""Returns commit SHA of current repo. Cached for various usages."""
- command_result = run_command(['git', 'rev-parse', 'HEAD'], capture_output=True, text=True, check=False)
+ command_result = run_command(["git", "rev-parse", "HEAD"], capture_output=True, text=True, check=False)
if command_result.stdout:
return command_result.stdout.strip()
else:
@@ -370,10 +370,10 @@ def check_if_image_exists(image: str, verbose: bool, dry_run: bool) -> bool:
def get_ci_image_for_pre_commits(verbose: bool, dry_run: bool) -> str:
- github_repository = os.environ.get('GITHUB_REPOSITORY', APACHE_AIRFLOW_GITHUB_REPOSITORY)
+ github_repository = os.environ.get("GITHUB_REPOSITORY", APACHE_AIRFLOW_GITHUB_REPOSITORY)
python_version = "3.7"
airflow_image = f"ghcr.io/{github_repository}/{AIRFLOW_BRANCH}/ci/python{python_version}"
- skip_image_pre_commits = os.environ.get('SKIP_IMAGE_PRE_COMMITS', "false")
+ skip_image_pre_commits = os.environ.get("SKIP_IMAGE_PRE_COMMITS", "false")
if skip_image_pre_commits[0].lower() == "t":
get_console().print(
f"[info]Skipping image check as SKIP_IMAGE_PRE_COMMITS is set to {skip_image_pre_commits}[/]"
@@ -384,7 +384,7 @@ def get_ci_image_for_pre_commits(verbose: bool, dry_run: bool) -> str:
verbose=verbose,
dry_run=dry_run,
):
- get_console().print(f'[red]The image {airflow_image} is not available.[/]\n')
+ get_console().print(f"[red]The image {airflow_image} is not available.[/]\n")
get_console().print(
f"\n[yellow]Please run this to fix it:[/]\n\n"
f"breeze ci-image build --python {python_version}\n\n"
@@ -424,11 +424,11 @@ def run_compile_www_assets(
sys.executable,
"-m",
"pre_commit",
- 'run',
+ "run",
"--hook-stage",
"manual",
- 'compile-www-assets-dev' if dev else 'compile-www-assets',
- '--all-files',
+ "compile-www-assets-dev" if dev else "compile-www-assets",
+ "--all-files",
]
if run_in_background:
thread = Thread(
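The two joins normalized in run_command serve different purposes: the title is a rough human-readable label, while the shlex.quote join produces a string safe to paste back into a shell. A quick illustration:

    import shlex

    cmd = ["docker", "build", "--build-arg", "MSG=a b", "."]
    print("<" + " ".join(cmd[:5]) + ">")           # rough title: <docker build --build-arg MSG=a b .>
    print(" ".join(shlex.quote(c) for c in cmd))   # docker build --build-arg 'MSG=a b' .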
diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
index 3007973afa..a1c9ca44a1 100644
--- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py
+++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
@@ -75,7 +75,7 @@ class FileGroupForCi(Enum):
ALL_SOURCE_FILES = "all_sources_for_tests"
-T = TypeVar('T', FileGroupForCi, SelectiveUnitTestTypes)
+T = TypeVar("T", FileGroupForCi, SelectiveUnitTestTypes)
class HashableDict(Dict[T, List[str]]):
@@ -220,7 +220,7 @@ def add_dependent_providers(
):
for provider, provider_info in dependencies.items():
# Providers that use this provider
- if provider_to_check in provider_info['cross-providers-deps']:
+ if provider_to_check in provider_info["cross-providers-deps"]:
providers.add(provider)
# and providers we use directly
for dep_name in dependencies[provider_to_check]["cross-providers-deps"]:
@@ -242,7 +242,7 @@ def find_all_providers_affected(changed_files: tuple[str, ...]) -> set[str]:
class SelectiveChecks:
- __HASHABLE_FIELDS = {'_files', '_default_branch', '_commit_ref', "_pr_labels", "_github_event"}
+ __HASHABLE_FIELDS = {"_files", "_default_branch", "_commit_ref", "_pr_labels", "_github_event"}
def __init__(
self,
@@ -274,7 +274,7 @@ class SelectiveChecks:
def __str__(self) -> str:
output = []
for field_name in dir(self):
- if not field_name.startswith('_'):
+ if not field_name.startswith("_"):
output.append(get_ga_output(field_name, getattr(self, field_name)))
return "\n".join(output)
@@ -570,7 +570,7 @@ class SelectiveChecks:
def docs_filter(self) -> str:
return (
""
- if self._default_branch == 'main'
+ if self._default_branch == "main"
else "--package-filter apache-airflow --package-filter docker-stack"
)
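SelectiveChecks.__str__ above leans on dir() plus the leading-underscore convention to emit every public attribute as a GitHub Actions output. A simplified sketch (the real class exposes its fields as properties and routes them through get_ga_output):

    class Checks:
        _commit_ref = "abc123"          # private: skipped by the filter
        default_branch = "main"
        docs_filter = ""

        def __str__(self) -> str:
            return "\n".join(
                f"{name}={getattr(self, name)}"
                for name in dir(self)
                if not name.startswith("_") and not callable(getattr(self, name))
            )

    print(Checks())  # default_branch=main, then docs_filter= on the next line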
diff --git a/dev/breeze/tests/test_cache.py b/dev/breeze/tests/test_cache.py
index 7e9270742e..a37a91165b 100644
--- a/dev/breeze/tests/test_cache.py
+++ b/dev/breeze/tests/test_cache.py
@@ -32,12 +32,12 @@ AIRFLOW_SOURCES = Path(__file__).parents[3].resolve()
@pytest.mark.parametrize(
- 'parameter, value, result, exception',
+ "parameter, value, result, exception",
[
- ("backend", "mysql", (True, ['sqlite', 'mysql', 'postgres', 'mssql']), None),
- ("backend", "xxx", (False, ['sqlite', 'mysql', 'postgres', 'mssql']), None),
- ("python_major_minor_version", "3.8", (True, ['3.7', '3.8', '3.9', '3.10']), None),
- ("python_major_minor_version", "3.5", (False, ['3.7', '3.8', '3.9', '3.10']), None),
+ ("backend", "mysql", (True, ["sqlite", "mysql", "postgres", "mssql"]), None),
+ ("backend", "xxx", (False, ["sqlite", "mysql", "postgres", "mssql"]), None),
+ ("python_major_minor_version", "3.8", (True, ["3.7", "3.8", "3.9", "3.10"]), None),
+ ("python_major_minor_version", "3.5", (False, ["3.7", "3.8", "3.9", "3.10"]), None),
("missing", "value", None, AttributeError),
],
)
@@ -56,7 +56,7 @@ def test_check_if_cache_exists(path):
@pytest.mark.parametrize(
- 'param',
+ "param",
[
"test_param",
"mysql_version",
@@ -73,8 +73,8 @@ def test_read_from_cache_file(param):
assert param_value in param_list
-@mock.patch('airflow_breeze.utils.cache.Path')
-@mock.patch('airflow_breeze.utils.cache.check_if_cache_exists')
+@mock.patch("airflow_breeze.utils.cache.Path")
+@mock.patch("airflow_breeze.utils.cache.check_if_cache_exists")
def test_delete_cache_exists(mock_check_if_cache_exists, mock_path):
param = "MYSQL_VERSION"
mock_check_if_cache_exists.return_value = True
@@ -83,8 +83,8 @@ def test_delete_cache_exists(mock_check_if_cache_exists, mock_path):
assert cache_deleted
-@mock.patch('airflow_breeze.utils.cache.Path')
-@mock.patch('airflow_breeze.utils.cache.check_if_cache_exists')
+@mock.patch("airflow_breeze.utils.cache.Path")
+@mock.patch("airflow_breeze.utils.cache.check_if_cache_exists")
def test_delete_cache_not_exists(mock_check_if_cache_exists, mock_path):
param = "TEST_PARAM"
mock_check_if_cache_exists.return_value = False
diff --git a/dev/breeze/tests/test_docker_command_utils.py b/dev/breeze/tests/test_docker_command_utils.py
index 2705e8c46c..9f8083929e 100644
--- a/dev/breeze/tests/test_docker_command_utils.py
+++ b/dev/breeze/tests/test_docker_command_utils.py
@@ -28,9 +28,9 @@ from airflow_breeze.utils.docker_command_utils import (
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission_denied')
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.check_docker_permission_denied")
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_version_unknown(
mock_get_console, mock_run_command, mock_check_docker_permission_denied
):
@@ -38,7 +38,7 @@ def test_check_docker_version_unknown(
check_docker_version(verbose=True)
expected_run_command_calls = [
call(
- ['docker', 'version', '--format', '{{.Client.Version}}'],
+ ["docker", "version", "--format", "{{.Client.Version}}"],
verbose=True,
no_output_dump_on_exception=True,
capture_output=True,
@@ -55,9 +55,9 @@ def test_check_docker_version_unknown(
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission_denied')
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.check_docker_permission_denied")
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_version_too_low(
mock_get_console, mock_run_command, mock_check_docker_permission_denied
):
@@ -67,7 +67,7 @@ def test_check_docker_version_too_low(
check_docker_version(verbose=True)
mock_check_docker_permission_denied.assert_called_with(True)
mock_run_command.assert_called_with(
- ['docker', 'version', '--format', '{{.Client.Version}}'],
+ ["docker", "version", "--format", "{{.Client.Version}}"],
verbose=True,
no_output_dump_on_exception=True,
capture_output=True,
@@ -81,9 +81,9 @@ def test_check_docker_version_too_low(
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission_denied')
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.check_docker_permission_denied")
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_version_ok(mock_get_console, mock_run_command, mock_check_docker_permission_denied):
mock_check_docker_permission_denied.return_value = False
mock_run_command.return_value.returncode = 0
@@ -91,7 +91,7 @@ def test_check_docker_version_ok(mock_get_console, mock_run_command, mock_check_
check_docker_version(verbose=True)
mock_check_docker_permission_denied.assert_called_with(True)
mock_run_command.assert_called_with(
- ['docker', 'version', '--format', '{{.Client.Version}}'],
+ ["docker", "version", "--format", "{{.Client.Version}}"],
verbose=True,
no_output_dump_on_exception=True,
capture_output=True,
@@ -101,9 +101,9 @@ def test_check_docker_version_ok(mock_get_console, mock_run_command, mock_check_
mock_get_console.return_value.print.assert_called_with("[success]Good version of Docker: 20.10.0.[/]")
-@mock.patch('airflow_breeze.utils.docker_command_utils.check_docker_permission_denied')
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.check_docker_permission_denied")
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_version_higher(mock_get_console, mock_run_command, mock_check_docker_permission_denied):
mock_check_docker_permission_denied.return_value = False
mock_run_command.return_value.returncode = 0
@@ -111,7 +111,7 @@ def test_check_docker_version_higher(mock_get_console, mock_run_command, mock_ch
check_docker_version(verbose=True)
mock_check_docker_permission_denied.assert_called_with(True)
mock_run_command.assert_called_with(
- ['docker', 'version', '--format', '{{.Client.Version}}'],
+ ["docker", "version", "--format", "{{.Client.Version}}"],
verbose=True,
no_output_dump_on_exception=True,
capture_output=True,
@@ -121,8 +121,8 @@ def test_check_docker_version_higher(mock_get_console, mock_run_command, mock_ch
mock_get_console.return_value.print.assert_called_with("[success]Good version of Docker: 21.10.0.[/]")
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_compose_version_unknown(mock_get_console, mock_run_command):
check_docker_compose_version(verbose=True)
expected_run_command_calls = [
@@ -143,8 +143,8 @@ def test_check_docker_compose_version_unknown(mock_get_console, mock_run_command
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_compose_version_low(mock_get_console, mock_run_command):
mock_run_command.return_value.returncode = 0
mock_run_command.return_value.stdout = "1.28.5"
@@ -172,8 +172,8 @@ Make sure docker-compose you install is first on the PATH variable of yours.
mock_get_console.return_value.print.assert_has_calls(expected_print_calls)
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_compose_version_ok(mock_get_console, mock_run_command):
mock_run_command.return_value.returncode = 0
mock_run_command.return_value.stdout = "1.29.0"
@@ -190,8 +190,8 @@ def test_check_docker_compose_version_ok(mock_get_console, mock_run_command):
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_compose_version_higher(mock_get_console, mock_run_command):
mock_run_command.return_value.returncode = 0
mock_run_command.return_value.stdout = "1.29.2"
@@ -208,8 +208,8 @@ def test_check_docker_compose_version_higher(mock_get_console, mock_run_command)
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_context_default(mock_get_console, mock_run_command):
mock_run_command.return_value.returncode = 0
mock_run_command.return_value.stdout = "default"
@@ -221,11 +221,11 @@ def test_check_docker_context_default(mock_get_console, mock_run_command):
text=True,
capture_output=True,
)
- mock_get_console.return_value.print.assert_called_with('[success]Good Docker context used: default.[/]')
+ mock_get_console.return_value.print.assert_called_with("[success]Good Docker context used: default.[/]")
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_context_other(mock_get_console, mock_run_command):
mock_run_command.return_value.returncode = 0
mock_run_command.return_value.stdout = "other"
@@ -239,14 +239,14 @@ def test_check_docker_context_other(mock_get_console, mock_run_command):
capture_output=True,
)
mock_get_console.return_value.print.assert_called_with(
- '[error]Docker is not using the default context, used context is: other[/]\n'
- '[warning]Please make sure Docker is using the default context.[/]\n'
+ "[error]Docker is not using the default context, used context is: other[/]\n"
+ "[warning]Please make sure Docker is using the default context.[/]\n"
'[warning]You can try switching contexts by running: "docker context use default"[/]'
)
-@mock.patch('airflow_breeze.utils.docker_command_utils.run_command')
-@mock.patch('airflow_breeze.utils.docker_command_utils.get_console')
+@mock.patch("airflow_breeze.utils.docker_command_utils.run_command")
+@mock.patch("airflow_breeze.utils.docker_command_utils.get_console")
def test_check_docker_context_command_failed(mock_get_console, mock_run_command):
mock_run_command.return_value.returncode = 1
check_docker_context(verbose=True)
@@ -258,7 +258,7 @@ def test_check_docker_context_command_failed(mock_get_console, mock_run_command)
capture_output=True,
)
mock_get_console.return_value.print.assert_called_with(
- '[warning]Could not check for Docker context.[/]\n'
+ "[warning]Could not check for Docker context.[/]\n"
'[warning]Please make sure that Docker is using the right context by running "docker info" and '
- 'checking the active Context.[/]'
+ "checking the active Context.[/]"
)
diff --git a/dev/breeze/tests/test_find_airflow_directory.py b/dev/breeze/tests/test_find_airflow_directory.py
index 8b70f92295..3182cb065e 100644
--- a/dev/breeze/tests/test_find_airflow_directory.py
+++ b/dev/breeze/tests/test_find_airflow_directory.py
@@ -31,7 +31,7 @@ def test_find_airflow_root_upwards_from_cwd(capsys):
sources = find_airflow_sources_root_to_operate_on()
assert sources == ACTUAL_AIRFLOW_SOURCES
output = str(capsys.readouterr().out)
- assert output == ''
+ assert output == ""
def test_find_airflow_root_upwards_from_file(capsys):
@@ -39,11 +39,11 @@ def test_find_airflow_root_upwards_from_file(capsys):
sources = find_airflow_sources_root_to_operate_on()
assert sources == ACTUAL_AIRFLOW_SOURCES
output = str(capsys.readouterr().out)
- assert output == ''
+ assert output == ""
-@mock.patch('airflow_breeze.utils.path_utils.AIRFLOW_CFG_FILE', "bad_name.cfg")
-@mock.patch('airflow_breeze.utils.path_utils.Path.cwd')
+@mock.patch("airflow_breeze.utils.path_utils.AIRFLOW_CFG_FILE", "bad_name.cfg")
+@mock.patch("airflow_breeze.utils.path_utils.Path.cwd")
def test_find_airflow_root_from_installation_dir(mock_cwd, capsys):
mock_cwd.return_value = ROOT_PATH
sources = find_airflow_sources_root_to_operate_on()
diff --git a/dev/breeze/tests/test_host_info_utils.py b/dev/breeze/tests/test_host_info_utils.py
index e4b7ab70c9..ba3256e8f7 100644
--- a/dev/breeze/tests/test_host_info_utils.py
+++ b/dev/breeze/tests/test_host_info_utils.py
@@ -18,7 +18,7 @@ from __future__ import annotations
from airflow_breeze.utils import host_info_utils
-SUPPORTED_OS = ['linux', 'darwin', 'windows']
+SUPPORTED_OS = ["linux", "darwin", "windows"]
def test_get_host_os():
diff --git a/dev/breeze/tests/test_pr_info.py b/dev/breeze/tests/test_pr_info.py
index 9cdc1b1bb7..23a971da5d 100644
--- a/dev/breeze/tests/test_pr_info.py
+++ b/dev/breeze/tests/test_pr_info.py
@@ -30,14 +30,14 @@ def test_pr_info():
json_string = (TEST_PR_INFO_DIR / "pr_github_context.json").read_text()
wi = workflow_info(json_string)
assert wi.pull_request_labels == [
- 'area:providers',
- 'area:dev-tools',
- 'area:logging',
- 'kind:documentation',
+ "area:providers",
+ "area:dev-tools",
+ "area:logging",
+ "kind:documentation",
]
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "test/airflow"
- assert wi.event_name == 'pull_request'
+ assert wi.event_name == "pull_request"
assert wi.pr_number == 26004
assert wi.get_runs_on() == "ubuntu-20.04"
assert wi.is_canary_run() == "false"
@@ -51,7 +51,7 @@ def test_push_info():
assert wi.pull_request_labels == []
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "apache/airflow"
- assert wi.event_name == 'push'
+ assert wi.event_name == "push"
assert wi.pr_number is None
assert wi.get_runs_on() == "ubuntu-20.04"
assert wi.is_canary_run() == "true"
@@ -65,7 +65,7 @@ def test_schedule():
assert wi.pull_request_labels == []
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "apache/airflow"
- assert wi.event_name == 'schedule'
+ assert wi.event_name == "schedule"
assert wi.pr_number is None
assert wi.get_runs_on() == "ubuntu-20.04"
assert wi.is_canary_run() == "false"
@@ -76,10 +76,10 @@ def test_runs_on_self_hosted():
with mock.patch.dict(os.environ, {"AIRFLOW_SELF_HOSTED_RUNNER": "true"}):
json_string = (TEST_PR_INFO_DIR / "simple_pr.json").read_text()
wi = workflow_info(json_string)
- assert wi.pull_request_labels == ['another']
+ assert wi.pull_request_labels == ["another"]
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "apache/airflow"
- assert wi.event_name == 'pull_request'
+ assert wi.event_name == "pull_request"
assert wi.pr_number == 1234
assert wi.get_runs_on() == "self-hosted"
assert wi.is_canary_run() == "false"
@@ -90,10 +90,10 @@ def test_runs_on_forced_public_runner():
with mock.patch.dict(os.environ, {"AIRFLOW_SELF_HOSTED_RUNNER": "true"}):
json_string = (TEST_PR_INFO_DIR / "self_hosted_forced_pr.json").read_text()
wi = workflow_info(json_string)
- assert wi.pull_request_labels == ['use public runners', 'another']
+ assert wi.pull_request_labels == ["use public runners", "another"]
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "apache/airflow"
- assert wi.event_name == 'pull_request'
+ assert wi.event_name == "pull_request"
assert wi.pr_number == 1234
assert wi.get_runs_on() == "ubuntu-20.04"
assert wi.is_canary_run() == "false"
@@ -104,10 +104,10 @@ def test_runs_on_simple_pr_other_repo():
with mock.patch.dict(os.environ, {"AIRFLOW_SELF_HOSTED_RUNNER": ""}):
json_string = (TEST_PR_INFO_DIR / "simple_pr_different_repo.json").read_text()
wi = workflow_info(json_string)
- assert wi.pull_request_labels == ['another']
+ assert wi.pull_request_labels == ["another"]
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "test/airflow"
- assert wi.event_name == 'pull_request'
+ assert wi.event_name == "pull_request"
assert wi.pr_number == 1234
assert wi.get_runs_on() == "ubuntu-20.04"
assert wi.is_canary_run() == "false"
@@ -121,7 +121,7 @@ def test_runs_on_push_other_branch():
assert wi.pull_request_labels == []
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "apache/airflow"
- assert wi.event_name == 'push'
+ assert wi.event_name == "push"
assert wi.pr_number is None
assert wi.get_runs_on() == "self-hosted"
assert wi.is_canary_run() == "false"
@@ -135,7 +135,7 @@ def test_runs_on_push_v_test_branch():
assert wi.pull_request_labels == []
assert wi.target_repo == "apache/airflow"
assert wi.head_repo == "apache/airflow"
- assert wi.event_name == 'push'
+ assert wi.event_name == "push"
assert wi.pr_number is None
assert wi.get_runs_on() == "self-hosted"
assert wi.is_canary_run() == "true"
diff --git a/dev/breeze/tests/test_run_utils.py b/dev/breeze/tests/test_run_utils.py
index a9dbe1e37e..78187f974a 100644
--- a/dev/breeze/tests/test_run_utils.py
+++ b/dev/breeze/tests/test_run_utils.py
@@ -29,8 +29,8 @@ from airflow_breeze.utils.run_utils import (
def test_change_file_permission(tmpdir):
- tmpfile = Path(tmpdir, 'test.config')
- tmpfile.write_text('content')
+ tmpfile = Path(tmpdir, "test.config")
+ tmpfile.write_text("content")
change_file_permission(tmpfile)
mode = os.stat(tmpfile).st_mode
assert not (mode & stat.S_IWGRP) and not (mode & stat.S_IWOTH)
@@ -48,7 +48,7 @@ def test_change_directory_permission(tmpdir):
def test_filter_out_none():
- dict_input_with_none = {'sample': None, 'sample1': 'One', 'sample2': 'Two', 'samplen': None}
- expected_dict_output = {'sample1': 'One', 'sample2': 'Two'}
+ dict_input_with_none = {"sample": None, "sample1": "One", "sample2": "Two", "samplen": None}
+ expected_dict_output = {"sample1": "One", "sample2": "Two"}
output_dict = filter_out_none(**dict_input_with_none)
TestCase().assertDictEqual(output_dict, expected_dict_output)
diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py
index e502bb627c..8ae602be2e 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -635,14 +635,14 @@ def test_no_commit_provided_trigger_full_build_for_any_event_type(github_event):
id="Setup.cfg changed",
),
pytest.param(
- ('airflow/providers/microsoft/azure/provider.yaml',),
+ ("airflow/providers/microsoft/azure/provider.yaml",),
{
"upgrade-to-newer-dependencies": "true",
},
id="Provider.yaml changed",
),
pytest.param(
- ('generated/provider_dependencies.json',),
+ ("generated/provider_dependencies.json",),
{
"upgrade-to-newer-dependencies": "true",
},
diff --git a/dev/chart/build_changelog_annotations.py b/dev/chart/build_changelog_annotations.py
index 34965d9270..259bd7dac4 100755
--- a/dev/chart/build_changelog_annotations.py
+++ b/dev/chart/build_changelog_annotations.py
@@ -59,7 +59,7 @@ PREFIXES_TO_STRIP = [
def parse_line(line: str) -> tuple[str | None, int | None]:
- match = re.search(r'^- (.*?)(?:\(#(\d+)\)){0,1}$', line)
+ match = re.search(r"^- (.*?)(?:\(#(\d+)\)){0,1}$", line)
if not match:
return None, None
desc, pr_number = match.groups()
@@ -96,7 +96,7 @@ with open("chart/RELEASE_NOTES.rst") as f:
break
in_first_release = True
continue
- if line.startswith('"""') or line.startswith('----') or line.startswith('^^^^'):
+ if line.startswith('"""') or line.startswith("----") or line.startswith("^^^^"):
continue
# Make sure we get past "significant features" before we actually start keeping track
@@ -106,7 +106,7 @@ with open("chart/RELEASE_NOTES.rst") as f:
past_significant_changes = True
continue
- if not line.startswith('- '):
+ if not line.startswith("- "):
section = line
continue
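Reconstructed around the hunk above, parse_line splits a changelog bullet into its description and optional PR number; the trailing (#NNNN) group is optional, so plain bullets still parse. The strip/int handling below is an assumption from the surrounding file:

    import re

    def parse_line(line: str):
        match = re.search(r"^- (.*?)(?:\(#(\d+)\)){0,1}$", line)
        if not match:
            return None, None
        desc, pr_number = match.groups()
        return desc.strip(), int(pr_number) if pr_number else None

    print(parse_line("- Fix scheduler race (#27111)"))  # ('Fix scheduler race', 27111)
    print(parse_line("- Docs-only tweak"))              # ('Docs-only tweak', None)
    print(parse_line("not a bullet"))                   # (None, None)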
diff --git a/dev/check_files.py b/dev/check_files.py
index 548d2e41b4..8a013b3672 100644
--- a/dev/check_files.py
+++ b/dev/check_files.py
@@ -56,7 +56,7 @@ def get_packages() -> list[tuple[str, str]]:
with open("packages.txt") as file:
content = file.read()
except FileNotFoundError:
- content = ''
+ content = ""
if not content:
raise SystemExit("List of packages to check is empty. Please add packages to `packages.txt`")
@@ -105,11 +105,11 @@ def check_providers(files: list[str]):
def strip_rc_suffix(version):
- return re.sub(r'rc\d+$', '', version)
+ return re.sub(r"rc\d+$", "", version)
def print_status(file, is_found: bool):
- color, status = ('green', 'OK') if is_found else ('red', 'MISSING')
+ color, status = ("green", "OK") if is_found else ("red", "MISSING")
print(f" - {file}: [{color}]{status}[/{color}]")
@@ -138,7 +138,7 @@ def check_release(files: list[str], version: str):
def expand_name_variations(files):
- return list(sorted(base + suffix for base, suffix in product(files, ['', '.asc', '.sha512'])))
+ return list(sorted(base + suffix for base, suffix in product(files, ["", ".asc", ".sha512"])))
def check_upgrade_check(files: list[str], version: str):
@@ -245,31 +245,31 @@ if __name__ == "__main__":
def test_check_release_pass():
"""Passes if all present"""
files = [
- 'apache_airflow-2.2.1-py3-none-any.whl',
- 'apache_airflow-2.2.1-py3-none-any.whl.asc',
- 'apache_airflow-2.2.1-py3-none-any.whl.sha512',
- 'apache-airflow-2.2.1-source.tar.gz',
- 'apache-airflow-2.2.1-source.tar.gz.asc',
- 'apache-airflow-2.2.1-source.tar.gz.sha512',
- 'apache-airflow-2.2.1.tar.gz',
- 'apache-airflow-2.2.1.tar.gz.asc',
- 'apache-airflow-2.2.1.tar.gz.sha512',
+ "apache_airflow-2.2.1-py3-none-any.whl",
+ "apache_airflow-2.2.1-py3-none-any.whl.asc",
+ "apache_airflow-2.2.1-py3-none-any.whl.sha512",
+ "apache-airflow-2.2.1-source.tar.gz",
+ "apache-airflow-2.2.1-source.tar.gz.asc",
+ "apache-airflow-2.2.1-source.tar.gz.sha512",
+ "apache-airflow-2.2.1.tar.gz",
+ "apache-airflow-2.2.1.tar.gz.asc",
+ "apache-airflow-2.2.1.tar.gz.sha512",
]
- assert check_release(files, version='2.2.1rc2') == []
+ assert check_release(files, version="2.2.1rc2") == []
def test_check_release_fail():
"""Fails if missing one"""
files = [
- 'apache_airflow-2.2.1-py3-none-any.whl',
- 'apache_airflow-2.2.1-py3-none-any.whl.asc',
- 'apache_airflow-2.2.1-py3-none-any.whl.sha512',
- 'apache-airflow-2.2.1-source.tar.gz',
- 'apache-airflow-2.2.1-source.tar.gz.asc',
- 'apache-airflow-2.2.1-source.tar.gz.sha512',
- 'apache-airflow-2.2.1.tar.gz.asc',
- 'apache-airflow-2.2.1.tar.gz.sha512',
+ "apache_airflow-2.2.1-py3-none-any.whl",
+ "apache_airflow-2.2.1-py3-none-any.whl.asc",
+ "apache_airflow-2.2.1-py3-none-any.whl.sha512",
+ "apache-airflow-2.2.1-source.tar.gz",
+ "apache-airflow-2.2.1-source.tar.gz.asc",
+ "apache-airflow-2.2.1-source.tar.gz.sha512",
+ "apache-airflow-2.2.1.tar.gz.asc",
+ "apache-airflow-2.2.1.tar.gz.sha512",
]
- missing_files = check_release(files, version='2.2.1rc2')
- assert missing_files == ['apache-airflow-2.2.1.tar.gz']
+ missing_files = check_release(files, version="2.2.1rc2")
+ assert missing_files == ["apache-airflow-2.2.1.tar.gz"]
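expand_name_variations above is a one-liner over itertools.product: every artifact must be present together with its .asc signature and .sha512 checksum. A quick demo of it and of strip_rc_suffix:

    import re
    from itertools import product

    files = ["apache-airflow-2.2.1.tar.gz"]
    print(sorted(base + suffix for base, suffix in product(files, ["", ".asc", ".sha512"])))
    # ['apache-airflow-2.2.1.tar.gz', 'apache-airflow-2.2.1.tar.gz.asc',
    #  'apache-airflow-2.2.1.tar.gz.sha512']

    print(re.sub(r"rc\d+$", "", "2.2.1rc2"))  # strip_rc_suffix: '2.2.1'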
diff --git a/dev/deprecations/generate_deprecated_dicts.py b/dev/deprecations/generate_deprecated_dicts.py
index 8fbe8290a3..b705fee48b 100644
--- a/dev/deprecations/generate_deprecated_dicts.py
+++ b/dev/deprecations/generate_deprecated_dicts.py
@@ -43,16 +43,16 @@ def black_mode():
config = parse_pyproject_toml(os.path.join(AIRFLOW_SOURCES_ROOT, "pyproject.toml"))
target_versions = set(
- target_version_option_callback(None, None, tuple(config.get('target_version', ()))),
+ target_version_option_callback(None, None, tuple(config.get("target_version", ()))),
)
return Mode(
target_versions=target_versions,
- line_length=config.get('line_length', Mode.line_length),
- is_pyi=bool(config.get('is_pyi', Mode.is_pyi)),
- string_normalization=not bool(config.get('skip_string_normalization', not Mode.string_normalization)),
+ line_length=config.get("line_length", Mode.line_length),
+ is_pyi=bool(config.get("is_pyi", Mode.is_pyi)),
+ string_normalization=not bool(config.get("skip_string_normalization", not Mode.string_normalization)),
experimental_string_processing=bool(
- config.get('experimental_string_processing', Mode.experimental_string_processing)
+ config.get("experimental_string_processing", Mode.experimental_string_processing)
),
)
@@ -81,7 +81,7 @@ def get_imports(path: Path):
if isinstance(node, ast.Import):
module_array: list[str] = []
elif isinstance(node, ast.ImportFrom) and node.module:
- module_array = node.module.split('.')
+ module_array = node.module.split(".")
elif isinstance(node, ast.ClassDef):
for base in node.bases:
res = imports.get(base.id) # type: ignore[attr-defined]
@@ -110,90 +110,90 @@ __deprecated_classes = {
"""
DEPRECATED_MODULES = [
- 'airflow/hooks/base_hook.py',
- 'airflow/hooks/dbapi_hook.py',
- 'airflow/hooks/docker_hook.py',
- 'airflow/hooks/druid_hook.py',
- 'airflow/hooks/hdfs_hook.py',
- 'airflow/hooks/hive_hooks.py',
- 'airflow/hooks/http_hook.py',
- 'airflow/hooks/jdbc_hook.py',
- 'airflow/hooks/mssql_hook.py',
- 'airflow/hooks/mysql_hook.py',
- 'airflow/hooks/oracle_hook.py',
- 'airflow/hooks/pig_hook.py',
- 'airflow/hooks/postgres_hook.py',
- 'airflow/hooks/presto_hook.py',
- 'airflow/hooks/S3_hook.py',
- 'airflow/hooks/samba_hook.py',
- 'airflow/hooks/slack_hook.py',
- 'airflow/hooks/sqlite_hook.py',
- 'airflow/hooks/webhdfs_hook.py',
- 'airflow/hooks/zendesk_hook.py',
- 'airflow/operators/bash_operator.py',
- 'airflow/operators/branch_operator.py',
- 'airflow/operators/check_operator.py',
- 'airflow/operators/dagrun_operator.py',
- 'airflow/operators/docker_operator.py',
- 'airflow/operators/druid_check_operator.py',
- 'airflow/operators/dummy.py',
- 'airflow/operators/dummy_operator.py',
- 'airflow/operators/email_operator.py',
- 'airflow/operators/gcs_to_s3.py',
- 'airflow/operators/google_api_to_s3_transfer.py',
- 'airflow/operators/hive_operator.py',
- 'airflow/operators/hive_stats_operator.py',
- 'airflow/operators/hive_to_druid.py',
- 'airflow/operators/hive_to_mysql.py',
- 'airflow/operators/hive_to_samba_operator.py',
- 'airflow/operators/http_operator.py',
- 'airflow/operators/jdbc_operator.py',
- 'airflow/operators/latest_only_operator.py',
- 'airflow/operators/mssql_operator.py',
- 'airflow/operators/mssql_to_hive.py',
- 'airflow/operators/mysql_operator.py',
- 'airflow/operators/mysql_to_hive.py',
- 'airflow/operators/oracle_operator.py',
- 'airflow/operators/papermill_operator.py',
- 'airflow/operators/pig_operator.py',
- 'airflow/operators/postgres_operator.py',
- 'airflow/operators/presto_check_operator.py',
- 'airflow/operators/presto_to_mysql.py',
- 'airflow/operators/python_operator.py',
- 'airflow/operators/redshift_to_s3_operator.py',
- 'airflow/operators/s3_file_transform_operator.py',
- 'airflow/operators/s3_to_hive_operator.py',
- 'airflow/operators/s3_to_redshift_operator.py',
- 'airflow/operators/slack_operator.py',
- 'airflow/operators/sql.py',
- 'airflow/operators/sql_branch_operator.py',
- 'airflow/operators/sqlite_operator.py',
- 'airflow/operators/subdag_operator.py',
- 'airflow/sensors/base_sensor_operator.py',
- 'airflow/sensors/date_time_sensor.py',
- 'airflow/sensors/external_task_sensor.py',
- 'airflow/sensors/hdfs_sensor.py',
- 'airflow/sensors/hive_partition_sensor.py',
- 'airflow/sensors/http_sensor.py',
- 'airflow/sensors/metastore_partition_sensor.py',
- 'airflow/sensors/named_hive_partition_sensor.py',
- 'airflow/sensors/s3_key_sensor.py',
- 'airflow/sensors/sql.py',
- 'airflow/sensors/sql_sensor.py',
- 'airflow/sensors/time_delta_sensor.py',
- 'airflow/sensors/web_hdfs_sensor.py',
- 'airflow/utils/log/cloudwatch_task_handler.py',
- 'airflow/utils/log/es_task_handler.py',
- 'airflow/utils/log/gcs_task_handler.py',
- 'airflow/utils/log/s3_task_handler.py',
- 'airflow/utils/log/stackdriver_task_handler.py',
- 'airflow/utils/log/wasb_task_handler.py',
+ "airflow/hooks/base_hook.py",
+ "airflow/hooks/dbapi_hook.py",
+ "airflow/hooks/docker_hook.py",
+ "airflow/hooks/druid_hook.py",
+ "airflow/hooks/hdfs_hook.py",
+ "airflow/hooks/hive_hooks.py",
+ "airflow/hooks/http_hook.py",
+ "airflow/hooks/jdbc_hook.py",
+ "airflow/hooks/mssql_hook.py",
+ "airflow/hooks/mysql_hook.py",
+ "airflow/hooks/oracle_hook.py",
+ "airflow/hooks/pig_hook.py",
+ "airflow/hooks/postgres_hook.py",
+ "airflow/hooks/presto_hook.py",
+ "airflow/hooks/S3_hook.py",
+ "airflow/hooks/samba_hook.py",
+ "airflow/hooks/slack_hook.py",
+ "airflow/hooks/sqlite_hook.py",
+ "airflow/hooks/webhdfs_hook.py",
+ "airflow/hooks/zendesk_hook.py",
+ "airflow/operators/bash_operator.py",
+ "airflow/operators/branch_operator.py",
+ "airflow/operators/check_operator.py",
+ "airflow/operators/dagrun_operator.py",
+ "airflow/operators/docker_operator.py",
+ "airflow/operators/druid_check_operator.py",
+ "airflow/operators/dummy.py",
+ "airflow/operators/dummy_operator.py",
+ "airflow/operators/email_operator.py",
+ "airflow/operators/gcs_to_s3.py",
+ "airflow/operators/google_api_to_s3_transfer.py",
+ "airflow/operators/hive_operator.py",
+ "airflow/operators/hive_stats_operator.py",
+ "airflow/operators/hive_to_druid.py",
+ "airflow/operators/hive_to_mysql.py",
+ "airflow/operators/hive_to_samba_operator.py",
+ "airflow/operators/http_operator.py",
+ "airflow/operators/jdbc_operator.py",
+ "airflow/operators/latest_only_operator.py",
+ "airflow/operators/mssql_operator.py",
+ "airflow/operators/mssql_to_hive.py",
+ "airflow/operators/mysql_operator.py",
+ "airflow/operators/mysql_to_hive.py",
+ "airflow/operators/oracle_operator.py",
+ "airflow/operators/papermill_operator.py",
+ "airflow/operators/pig_operator.py",
+ "airflow/operators/postgres_operator.py",
+ "airflow/operators/presto_check_operator.py",
+ "airflow/operators/presto_to_mysql.py",
+ "airflow/operators/python_operator.py",
+ "airflow/operators/redshift_to_s3_operator.py",
+ "airflow/operators/s3_file_transform_operator.py",
+ "airflow/operators/s3_to_hive_operator.py",
+ "airflow/operators/s3_to_redshift_operator.py",
+ "airflow/operators/slack_operator.py",
+ "airflow/operators/sql.py",
+ "airflow/operators/sql_branch_operator.py",
+ "airflow/operators/sqlite_operator.py",
+ "airflow/operators/subdag_operator.py",
+ "airflow/sensors/base_sensor_operator.py",
+ "airflow/sensors/date_time_sensor.py",
+ "airflow/sensors/external_task_sensor.py",
+ "airflow/sensors/hdfs_sensor.py",
+ "airflow/sensors/hive_partition_sensor.py",
+ "airflow/sensors/http_sensor.py",
+ "airflow/sensors/metastore_partition_sensor.py",
+ "airflow/sensors/named_hive_partition_sensor.py",
+ "airflow/sensors/s3_key_sensor.py",
+ "airflow/sensors/sql.py",
+ "airflow/sensors/sql_sensor.py",
+ "airflow/sensors/time_delta_sensor.py",
+ "airflow/sensors/web_hdfs_sensor.py",
+ "airflow/utils/log/cloudwatch_task_handler.py",
+ "airflow/utils/log/es_task_handler.py",
+ "airflow/utils/log/gcs_task_handler.py",
+ "airflow/utils/log/s3_task_handler.py",
+ "airflow/utils/log/stackdriver_task_handler.py",
+ "airflow/utils/log/wasb_task_handler.py",
]
CONTRIB_FILES = (AIRFLOW_SOURCES_ROOT / "airflow" / "contrib").rglob("*.py")
-if __name__ == '__main__':
+if __name__ == "__main__":
console = Console(color_system="standard", width=300)
all_deprecated_imports: dict[str, dict[str, list[Import]]] = defaultdict(lambda: defaultdict(list))
# delete = True
@@ -206,7 +206,7 @@ if __name__ == '__main__':
original_module = os.fspath(file_path.parent.relative_to(AIRFLOW_SOURCES_ROOT)).replace(os.sep, ".")
for _import in get_imports(file_path):
module_name = file_path.name[: -len(".py")]
- if _import.name not in ['warnings', 'RemovedInAirflow3Warning']:
+ if _import.name not in ["warnings", "RemovedInAirflow3Warning"]:
all_deprecated_imports[original_module][module_name].append(_import)
if delete:
file_path.unlink()
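The import scan in get_imports is standard ast traversal: parse the file, walk the tree, and split ImportFrom modules on dots. A self-contained sketch:

    import ast

    source = "from airflow.hooks.base_hook import BaseHook\nimport warnings\n"
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.ImportFrom) and node.module:
            print(node.module.split("."), [alias.name for alias in node.names])
    # ['airflow', 'hooks', 'base_hook'] ['BaseHook']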
diff --git a/dev/example_dags/update_example_dags_paths.py b/dev/example_dags/update_example_dags_paths.py
index a31e10e7a5..08251cd213 100755
--- a/dev/example_dags/update_example_dags_paths.py
+++ b/dev/example_dags/update_example_dags_paths.py
@@ -85,7 +85,7 @@ def replace_match(file: str, line: str, provider: str, version: str) -> str | No
console.print(line)
return None
if line != new_line:
- console.print(f'[yellow] Replacing in {file}[/]\n{line.strip()}\n{new_line.strip()}')
+ console.print(f"[yellow] Replacing in {file}[/]\n{line.strip()}\n{new_line.strip()}")
return new_line
return line
@@ -100,20 +100,20 @@ def find_matches(_file: Path, provider: str, version: str):
_file.write_text("".join(new_lines))
-if __name__ == '__main__':
+if __name__ == "__main__":
curdir = Path(os.curdir).resolve()
dirs = list(filter(os.path.isdir, curdir.iterdir()))
with Progress(console=console) as progress:
task = progress.add_task(f"Updating {len(dirs)}", total=len(dirs))
for directory in dirs:
- if directory.name.startswith('apache-airflow-providers-'):
- provider = directory.name[len('apache-airflow-providers-') :]
+ if directory.name.startswith("apache-airflow-providers-"):
+ provider = directory.name[len("apache-airflow-providers-") :]
console.print(f"[bright_blue] Processing {directory}")
version_dirs = list(filter(os.path.isdir, directory.iterdir()))
for version_dir in version_dirs:
version = version_dir.name
console.print(version)
- for file_name in ["index.html", 'example-dags.html']:
+ for file_name in ["index.html", "example-dags.html"]:
candidate_file = version_dir / file_name
if candidate_file.exists():
find_matches(candidate_file, provider, version)
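The slicing idiom normalized above, name[len(prefix):], is the pre-3.9 way to strip a known prefix; in isolation:

    name = "apache-airflow-providers-google"
    prefix = "apache-airflow-providers-"
    print(name[len(prefix):])  # 'google'
    # On Python 3.9+, name.removeprefix(prefix) reads better and is equivalent.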
diff --git a/dev/prepare_bulk_issues.py b/dev/prepare_bulk_issues.py
index 14fec991fc..d780e31ad9 100755
--- a/dev/prepare_bulk_issues.py
+++ b/dev/prepare_bulk_issues.py
@@ -46,7 +46,7 @@ SOURCE_DIR_PATH = os.path.abspath(os.path.join(MY_DIR_PATH, os.pardir))
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500})
+@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
...
@@ -116,7 +116,7 @@ option_github_token = click.option(
Can be generated with:
https://github.com/settings/tokens/new?description=Write%20issues&scopes=repo:status,public_repo"""
),
- envvar='GITHUB_TOKEN',
+ envvar="GITHUB_TOKEN",
)
option_verbose = click.option(
diff --git a/dev/prepare_release_issue.py b/dev/prepare_release_issue.py
index fb37b1f02d..9811d6051e 100755
--- a/dev/prepare_release_issue.py
+++ b/dev/prepare_release_issue.py
@@ -42,7 +42,7 @@ PR_PATTERN = re.compile(r".*\(#([0-9]+)\)")
ISSUE_MATCH_IN_BODY = re.compile(r" #([0-9]+)[^0-9]")
-@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500})
+@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
...
@@ -78,11 +78,11 @@ option_github_token = click.option(
Can be generated with:
https://github.com/settings/tokens/new?description=Read%20issues&scopes=repo:status"""
),
- envvar='GITHUB_TOKEN',
+ envvar="GITHUB_TOKEN",
)
option_excluded_pr_list = click.option(
- "--excluded-pr-list", type=str, default='', help="Coma-separated list of PRs to exclude from the issue."
+ "--excluded-pr-list", type=str, default="", help="Coma-separated list of PRs to exclude from the issue."
)
option_limit_pr_count = click.option(
@@ -123,9 +123,9 @@ def get_git_log_command(
elif from_commit:
git_cmd.append(from_commit)
if is_helm_chart:
- git_cmd.extend(['--', 'chart/'])
+ git_cmd.extend(["--", "chart/"])
else:
- git_cmd.extend(['--', '.'])
+ git_cmd.extend(["--", "."])
if verbose:
console.print(f"Command to run: '{' '.join(git_cmd)}'")
return git_cmd
@@ -154,7 +154,7 @@ def get_change_from_line(line: str):
short_hash=split_line[1],
date=split_line[2],
message=message,
- message_without_backticks=message.replace("`", "'").replace("&#39;", "'").replace('&amp;', "&"),
+ message_without_backticks=message.replace("`", "'").replace("&#39;", "'").replace("&amp;", "&"),
pr=int(pr) if pr else None,
)
@@ -220,16 +220,16 @@ def print_issue_content(
all_users.update(user_list)
all_user_logins = "@" + " @".join(all_users)
content = render_template(
- template_name='ISSUE',
+ template_name="ISSUE",
context={
- 'link': link,
- 'link_text': link_text,
- 'pr_list': pr_list,
- 'pull_requests': pull_requests,
- 'linked_issues': linked_issues,
- 'users': users,
- 'user_logins': user_logins,
- 'all_user_logins': all_user_logins,
+ "link": link,
+ "link_text": link_text,
+ "pr_list": pr_list,
+ "pull_requests": pull_requests,
+ "linked_issues": linked_issues,
+ "users": users,
+ "user_logins": user_logins,
+ "all_user_logins": all_user_logins,
},
autoescape=False,
keep_trailing_newline=True,
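The "--" separator normalized above is what limits git log to a path (chart/ for helm-chart issues, "." otherwise). A sketch of the builder with a simplified signature; the exact format string is illustrative:

    from __future__ import annotations

    def git_log_cmd(from_commit: str | None, to_commit: str | None, helm_chart: bool) -> list[str]:
        cmd = ["git", "log", "--format=format:%H"]
        if from_commit and to_commit:
            cmd.append(f"{from_commit}...{to_commit}")
        elif from_commit:
            cmd.append(from_commit)
        cmd.extend(["--", "chart/"] if helm_chart else ["--", "."])  # path limit
        return cmd

    print(" ".join(git_log_cmd("v2-4-stable", None, helm_chart=True)))
    # git log --format=format:%H v2-4-stable -- chart/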
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 80bbb0aaf6..e47756dc65 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -145,7 +145,7 @@ class EntityType(Enum):
Secrets = "Secrets"
-@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500})
+@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
...
@@ -158,19 +158,19 @@ option_skip_tag_check = click.option(
)
option_git_update = click.option(
- '--git-update/--no-git-update',
+ "--git-update/--no-git-update",
default=True,
is_flag=True,
help=f"If the git remote {HTTPS_REMOTE} already exists, don't try to update it",
)
option_package_format = click.option(
- '--package-format',
+ "--package-format",
type=click.Choice(["wheel", "sdist", "both"]),
- help='Format of packages.',
+ help="Format of packages.",
default="wheel",
show_default=True,
- envvar='PACKAGE_FORMAT',
+ envvar="PACKAGE_FORMAT",
)
option_version_suffix = click.option(
@@ -192,9 +192,9 @@ option_force = click.option(
is_flag=True,
help="Forces regeneration of already generated documentation",
)
-argument_package_id = click.argument('package_id')
-argument_changelog_files = click.argument('changelog_files', nargs=-1)
-argument_package_ids = click.argument('package_ids', nargs=-1)
+argument_package_id = click.argument("package_id")
+argument_changelog_files = click.argument("changelog_files", nargs=-1)
+argument_package_ids = click.argument("package_ids", nargs=-1)
@contextmanager
@@ -206,8 +206,8 @@ def with_group(title: str) -> Generator[None, None, None]:
For more information, see:
https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines
"""
- if os.environ.get('GITHUB_ACTIONS', 'false') != "true":
- console.print("#" * 10 + ' [bright_blue]' + title + '[/] ' + "#" * 10)
+ if os.environ.get("GITHUB_ACTIONS", "false") != "true":
+ console.print("#" * 10 + " [bright_blue]" + title + "[/] " + "#" * 10)
yield
return
console.print(f"::group::[bright_blue]{title}[/]")
@@ -291,7 +291,7 @@ def get_long_description(provider_package_id: str) -> str:
readme_file = os.path.join(package_folder, "README.md")
if not os.path.exists(readme_file):
return ""
- with open(readme_file, encoding='utf-8') as file:
+ with open(readme_file, encoding="utf-8") as file:
readme_contents = file.read()
copying = True
long_description = ""
@@ -352,18 +352,18 @@ def get_package_extras(provider_package_id: str) -> dict[str, list[str]]:
:param provider_package_id: id of the package
"""
- if provider_package_id == 'providers':
+ if provider_package_id == "providers":
return {}
extras_dict: dict[str, list[str]] = {
module: [get_pip_package_name(module)]
for module in ALL_DEPENDENCIES[provider_package_id][CROSS_PROVIDERS_DEPS]
}
provider_yaml_dict = get_provider_yaml(provider_package_id)
- additional_extras = provider_yaml_dict.get('additional-extras')
+ additional_extras = provider_yaml_dict.get("additional-extras")
if additional_extras:
for entry in additional_extras:
- name = entry['name']
- dependencies = entry['dependencies']
+ name = entry["name"]
+ dependencies = entry["dependencies"]
if name in extras_dict:
# remove non-versioned dependencies if versioned ones are coming
existing_dependencies = set(extras_dict[name])
@@ -509,7 +509,7 @@ def convert_pip_requirements_to_table(requirements: Iterable[str], markdown: boo
package = found.group(1)
version_required = found.group(2)
if version_required != "":
- version_required = f"`{version_required}`" if markdown else f'``{version_required}``'
+ version_required = f"`{version_required}`" if markdown else f"``{version_required}``"
table_data.append((f"`{package}`" if markdown else f"``{package}``", version_required))
else:
table_data.append((dependency, ""))
@@ -635,11 +635,11 @@ def check_if_release_version_ok(
:return: Tuple of current/previous_release (previous might be None if there are no releases)
"""
previous_release_version = past_releases[0].release_version if past_releases else None
- if current_release_version == '':
+ if current_release_version == "":
if previous_release_version:
current_release_version = previous_release_version
else:
- current_release_version = (datetime.today() + timedelta(days=5)).strftime('%Y.%m.%d')
+ current_release_version = (datetime.today() + timedelta(days=5)).strftime("%Y.%m.%d")
if previous_release_version:
if Version(current_release_version) < Version(previous_release_version):
console.print(
@@ -714,7 +714,7 @@ def make_sure_remote_apache_exists_and_fetch(git_update: bool, verbose: bool):
console.print("This might override your local tags!")
is_shallow_repo = (
subprocess.check_output(["git", "rev-parse", "--is-shallow-repository"], stderr=subprocess.DEVNULL)
- == 'true'
+ == "true"
)
fetch_command = ["git", "fetch", "--tags", "--force", HTTPS_REMOTE]
if is_shallow_repo:
@@ -732,10 +732,10 @@ def make_sure_remote_apache_exists_and_fetch(git_update: bool, verbose: bool):
)
except subprocess.CalledProcessError as e:
console.print(
- '[yellow]Error when fetching tags from remote. Your tags might not be refreshed. '
+ "[yellow]Error when fetching tags from remote. Your tags might not be refreshed. "
f'Please refresh the tags manually via {" ".join(fetch_command)}\n'
)
- console.print(f'[yellow]The error was: {e}')
+ console.print(f"[yellow]The error was: {e}")
def get_git_log_command(
@@ -758,7 +758,7 @@ def get_git_log_command(
git_cmd.append(f"{from_commit}...{to_commit}")
elif from_commit:
git_cmd.append(from_commit)
- git_cmd.extend(['--', '.'])
+ git_cmd.extend(["--", "."])
if verbose:
console.print(f"Command to run: '{' '.join(git_cmd)}'")
return git_cmd
@@ -885,9 +885,9 @@ def get_provider_info_from_provider_yaml(provider_package_id: str) -> dict[str,
return provider_yaml_dict
-def get_version_tag(version: str, provider_package_id: str, version_suffix: str = ''):
+def get_version_tag(version: str, provider_package_id: str, version_suffix: str = ""):
if version_suffix is None:
- version_suffix = ''
+ version_suffix = ""
return f"providers-{provider_package_id.replace('.','-')}/{version}{version_suffix}"
@@ -976,7 +976,7 @@ def get_all_changes_for_package(
else:
console.print(f"New version of the '{provider_package_id}' package is ready to be released!\n")
next_version_tag = HEAD_OF_HTTPS_REMOTE
- changes_table = ''
+ changes_table = ""
current_version = provider_details.versions[0]
list_of_list_of_changes: list[list[Change]] = []
for version in provider_details.versions[1:]:
@@ -1015,15 +1015,15 @@ def get_provider_details(provider_package_id: str) -> ProviderPackageDetails:
pypi_package_name=f"apache-airflow-providers-{provider_package_id.replace('.', '-')}",
source_provider_package_path=get_source_package_path(provider_package_id),
documentation_provider_package_path=get_documentation_package_path(provider_package_id),
- provider_description=provider_info['description'],
- versions=provider_info['versions'],
+ provider_description=provider_info["description"],
+ versions=provider_info["versions"],
excluded_python_versions=provider_info.get("excluded-python-versions") or [],
)
def get_provider_requirements(provider_package_id: str) -> list[str]:
provider_yaml = get_provider_yaml(provider_package_id)
- return provider_yaml['dependencies']
+ return provider_yaml["dependencies"]
def get_provider_jinja_context(
@@ -1068,7 +1068,7 @@ def get_provider_jinja_context(
"PROVIDER_PATH": provider_details.full_package_name.replace(".", "/"),
"RELEASE": current_release_version,
"RELEASE_NO_LEADING_ZEROS": release_version_no_leading_zeros,
- "VERSION_SUFFIX": version_suffix or '',
+ "VERSION_SUFFIX": version_suffix or "",
"ADDITIONAL_INFO": get_additional_package_info(
provider_package_path=provider_details.source_provider_package_path
),
@@ -1116,7 +1116,7 @@ def confirm(message: str, answer: str | None = None) -> bool:
"""
given_answer = answer.lower() if answer is not None else ""
while given_answer not in ["y", "n", "q", "yes", "no", "quit"]:
- console.print(f"[yellow]{message}[y/n/q]?[/] ", end='')
+ console.print(f"[yellow]{message}[y/n/q]?[/] ", end="")
try:
given_answer = input("").lower()
except KeyboardInterrupt:
@@ -1156,12 +1156,12 @@ def get_type_of_changes(answer: str | None) -> TypeOfChange:
console.print(
"[yellow]Type of change (d)ocumentation, (b)ugfix, (f)eature, (x)breaking "
"change, (s)kip, (q)uit [d/b/f/x/s/q]?[/] ",
- end='',
+ end="",
)
try:
given_answer = input("").lower()
except KeyboardInterrupt:
- given_answer = 'q'
+ given_answer = "q"
if given_answer == "q":
# Returns 65 in case user decided to quit
sys.exit(65)
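
confirm() and get_type_of_changes() share the same prompt idiom: print with end="", read input, and map Ctrl-C to "q", which exits with code 65. A standalone sketch of that idiom, assuming the same exit-code convention:

    import sys

    def ask(prompt: str, choices: set[str]) -> str:
        answer = ""
        while answer not in choices:
            print(f"{prompt} {sorted(choices)}? ", end="")
            try:
                answer = input().lower()
            except KeyboardInterrupt:
                answer = "q"  # treat Ctrl-C as quit, like the scripts do
        if answer == "q":
            sys.exit(65)  # 65 signals that the user chose to quit
        return answer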
@@ -1194,7 +1194,7 @@ def add_new_version(type_of_change: TypeOfChange, provider_package_id: str):
v = v.bump_patch()
provider_yaml_path = Path(get_source_package_path(provider_package_id)) / "provider.yaml"
original_text = provider_yaml_path.read_text()
- new_text = re.sub(r'versions:', f'versions:\n - {v}', original_text, 1)
+ new_text = re.sub(r"versions:", f"versions:\n - {v}", original_text, 1)
provider_yaml_path.write_text(new_text)
console.print()
console.print(f"[bright_blue]Bumped version to {v}")
@@ -1324,7 +1324,7 @@ def update_index_rst(
target_path,
):
index_update = render_template(
- template_name="PROVIDER_INDEX", context=context, extension='.rst', keep_trailing_newline=True
+ template_name="PROVIDER_INDEX", context=context, extension=".rst", keep_trailing_newline=True
)
index_file_path = os.path.join(target_path, "index.rst")
old_text = ""
@@ -1347,7 +1347,7 @@ def update_commits_rst(
target_path,
):
new_text = render_template(
- template_name="PROVIDER_COMMITS", context=context, extension='.rst', keep_trailing_newline=True
+ template_name="PROVIDER_COMMITS", context=context, extension=".rst", keep_trailing_newline=True
)
index_file_path = os.path.join(target_path, "commits.rst")
old_text = ""
@@ -1364,16 +1364,16 @@ def black_mode():
config = parse_pyproject_toml(os.path.join(AIRFLOW_SOURCES_ROOT_PATH, "pyproject.toml"))
target_versions = set(
- target_version_option_callback(None, None, tuple(config.get('target_version', ()))),
+ target_version_option_callback(None, None, tuple(config.get("target_version", ()))),
)
return Mode(
target_versions=target_versions,
- line_length=config.get('line_length', Mode.line_length),
- is_pyi=bool(config.get('is_pyi', Mode.is_pyi)),
- string_normalization=not bool(config.get('skip_string_normalization', not Mode.string_normalization)),
+ line_length=config.get("line_length", Mode.line_length),
+ is_pyi=bool(config.get("is_pyi", Mode.is_pyi)),
+ string_normalization=not bool(config.get("skip_string_normalization", not Mode.string_normalization)),
experimental_string_processing=bool(
- config.get('experimental_string_processing', Mode.experimental_string_processing)
+ config.get("experimental_string_processing", Mode.experimental_string_processing)
),
)
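
black_mode() reads pyproject.toml so that the in-script formatter matches the project settings, including the string_normalization flag this whole commit turns on. The knob in isolation, using Black's public API (assuming a Black version where Mode exposes string_normalization, as the code above does):

    import black

    code = "x = 'single quoted'\n"
    mode = black.Mode(string_normalization=True)  # False would preserve the original quote style
    print(black.format_str(code, mode=mode))  # prints: x = "single quoted"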
@@ -1388,7 +1388,7 @@ def prepare_setup_py_file(context):
setup_py_template_name = "SETUP"
setup_py_file_path = os.path.abspath(os.path.join(get_target_folder(), "setup.py"))
setup_py_content = render_template(
- template_name=setup_py_template_name, context=context, extension='.py', autoescape=False
+ template_name=setup_py_template_name, context=context, extension=".py", autoescape=False
)
with open(setup_py_file_path, "wt") as setup_py_file:
setup_py_file.write(black_format(setup_py_content))
@@ -1400,7 +1400,7 @@ def prepare_setup_cfg_file(context):
setup_cfg_content = render_template(
template_name=setup_cfg_template_name,
context=context,
- extension='.cfg',
+ extension=".cfg",
autoescape=False,
keep_trailing_newline=True,
)
@@ -1419,7 +1419,7 @@ def prepare_get_provider_info_py_file(context, provider_package_id: str):
get_provider_content = render_template(
template_name=get_provider_template_name,
context=context,
- extension='.py',
+ extension=".py",
autoescape=False,
keep_trailing_newline=True,
)
@@ -1432,7 +1432,7 @@ def prepare_manifest_in_file(context):
content = render_template(
template_name="MANIFEST",
context=context,
- extension='.in',
+ extension=".in",
autoescape=False,
keep_trailing_newline=True,
)
@@ -1496,9 +1496,9 @@ def list_providers_packages():
@click.option(
"-a",
"--answer",
- type=click.Choice(['y', 'n', 'q', 'yes', 'no', 'quit']),
+ type=click.Choice(["y", "n", "q", "yes", "no", "quit"]),
help="Force answer to questions.",
- envvar='ANSWER',
+ envvar="ANSWER",
)
def update_package_documentation(
version_suffix: str,
@@ -1574,7 +1574,7 @@ def get_current_tag(provider_package_id: str, suffix: str, git_update: bool, ver
verify_provider_package(provider_package_id)
make_sure_remote_apache_exists_and_fetch(git_update, verbose)
provider_info = get_provider_info_from_provider_yaml(provider_package_id)
- versions: list[str] = provider_info['versions']
+ versions: list[str] = provider_info["versions"]
current_version = versions[0]
current_tag = get_version_tag(current_version, provider_package_id, suffix)
return current_tag
@@ -1654,10 +1654,10 @@ def build_provider_packages(
console.print(f"Building provider package: {provider_package} in format {package_format}")
command: list[str] = ["python3", "setup.py", "build", "--build-temp", tmp_build_dir]
if version_suffix is not None:
- command.extend(['egg_info', '--tag-build', version_suffix])
- if package_format in ['sdist', 'both']:
+ command.extend(["egg_info", "--tag-build", version_suffix])
+ if package_format in ["sdist", "both"]:
command.append("sdist")
- if package_format in ['wheel', 'both']:
+ if package_format in ["wheel", "both"]:
command.extend(["bdist_wheel", "--bdist-dir", tmp_dist_dir])
console.print(f"Executing command: '{' '.join(command)}'")
try:
@@ -1689,7 +1689,7 @@ def find_insertion_index_for_version(content: list[str], version: str) -> tuple[
if not changelog_found and line.strip() == version:
changelog_found = True
skip_next_line = True
- elif not skip_next_line and line and all(char == '.' for char in line):
+ elif not skip_next_line and line and all(char == "." for char in line):
return index - 2, changelog_found
else:
skip_next_line = False
@@ -1752,7 +1752,7 @@ def _update_changelog(package_id: str, verbose: bool) -> bool:
provider_info=provider_info,
provider_details=provider_details,
current_release_version=current_release_version,
- version_suffix='',
+ version_suffix="",
)
changelog_path = os.path.join(provider_details.source_provider_package_path, "CHANGELOG.rst")
proceed, changes, _ = get_all_changes_for_package(
@@ -1796,7 +1796,7 @@ def generate_new_changelog(package_id, provider_details, changelog_path, changes
return
context = {"new_changes": new_changes}
generated_new_changelog = render_template(
- template_name='UPDATE_CHANGELOG', context=context, extension=".rst"
+ template_name="UPDATE_CHANGELOG", context=context, extension=".rst"
)
else:
classified_changes = get_changes_classified(changes[0])
@@ -1806,7 +1806,7 @@ def generate_new_changelog(package_id, provider_details, changelog_path, changes
"classified_changes": classified_changes,
}
generated_new_changelog = render_template(
- template_name='CHANGELOG', context=context, extension=".rst"
+ template_name="CHANGELOG", context=context, extension=".rst"
)
new_changelog_lines = current_changelog_lines[0:insertion_index]
new_changelog_lines.extend(generated_new_changelog.splitlines())
@@ -1830,11 +1830,11 @@ def generate_new_changelog(package_id, provider_details, changelog_path, changes
def get_package_from_changelog(changelog_path: str):
folder = Path(changelog_path).parent
- package = ''
- separator = ''
- while not os.path.basename(folder) == 'providers':
+ package = ""
+ separator = ""
+ while not os.path.basename(folder) == "providers":
package = os.path.basename(folder) + separator + package
- separator = '.'
+ separator = "."
folder = Path(folder).parent
return package
@@ -1873,10 +1873,10 @@ def get_prs_for_package(package_id: str) -> list[int]:
skip_line = True
continue
if extract_prs:
- if len(line) > 1 and all(c == '.' for c in line.strip()):
+ if len(line) > 1 and all(c == "." for c in line.strip()):
# Header for next version reached
break
- if line.startswith('.. Below changes are excluded from the changelog'):
+ if line.startswith(".. Below changes are excluded from the changelog"):
# The remainder of PRs is not important, skipping it
break
match_result = pr_matcher.match(line.strip())
@@ -1905,8 +1905,8 @@ def is_package_in_dist(dist_files: list[str], package: str) -> bool:
@cli.command()
@click.option(
- '--github-token',
- envvar='GITHUB_TOKEN',
+ "--github-token",
+ envvar="GITHUB_TOKEN",
help=textwrap.dedent(
"""
GitHub token used to authenticate.
@@ -1915,13 +1915,13 @@ def is_package_in_dist(dist_files: list[str], package: str) -> bool:
https://github.com/settings/tokens/new?description=Read%20sssues&scopes=repo:status"""
),
)
-@click.option('--suffix', default='rc1')
+@click.option("--suffix", default="rc1")
@click.option(
- '--only-available-in-dist',
+ "--only-available-in-dist",
is_flag=True,
- help='Only consider package ids with packages prepared in the dist folder',
+ help="Only consider package ids with packages prepared in the dist folder",
)
-@click.option('--excluded-pr-list', type=str, help="Comma-separated list of PRs to exclude from the issue.")
+@click.option("--excluded-pr-list", type=str, help="Comma-separated list of PRs to exclude from the issue.")
@argument_package_ids
def generate_issue_content(
package_ids: list[str],
@@ -1983,10 +1983,10 @@ def generate_issue_content(
else:
non_interesting_providers[package_id] = ProviderPRInfo(provider_details, pull_request_list)
context = {
- 'interesting_providers': interesting_providers,
- 'date': datetime.now(),
- 'suffix': suffix,
- 'non_interesting_providers': non_interesting_providers,
+ "interesting_providers": interesting_providers,
+ "date": datetime.now(),
+ "suffix": suffix,
+ "non_interesting_providers": non_interesting_providers,
}
issue_content = render_template(template_name="PROVIDER_ISSUE", context=context, extension=".md")
console.print()
@@ -2022,7 +2022,7 @@ if __name__ == "__main__":
try:
cli()
except KeyboardInterrupt:
- print('Interrupted')
+ print("Interrupted")
try:
sys.exit(65)
except SystemExit:
diff --git a/dev/provider_packages/remove_old_releases.py b/dev/provider_packages/remove_old_releases.py
index 44da102ae0..0d3c8d52bd 100644
--- a/dev/provider_packages/remove_old_releases.py
+++ b/dev/provider_packages/remove_old_releases.py
@@ -82,21 +82,21 @@ def process_all_files(directory: str, suffix: str, execute: bool):
def parse_args() -> argparse.Namespace:
- parser = argparse.ArgumentParser(description='Removes old releases.')
+ parser = argparse.ArgumentParser(description="Removes old releases.")
parser.add_argument(
- '--directory',
- dest='directory',
- action='store',
+ "--directory",
+ dest="directory",
+ action="store",
required=True,
- help='Directory to remove old releases in',
+ help="Directory to remove old releases in",
)
parser.add_argument(
- '--execute', dest='execute', action='store_true', help='Execute the removal rather than dry run'
+ "--execute", dest="execute", action="store_true", help="Execute the removal rather than dry run"
)
return parser.parse_args()
-if __name__ == '__main__':
+if __name__ == "__main__":
args = parse_args()
process_all_files(args.directory, ".tar.gz", args.execute)
process_all_files(args.directory, ".tar.gz.sha512", args.execute)
diff --git a/dev/send_email.py b/dev/send_email.py
index e54180b698..505eb73fca 100755
--- a/dev/send_email.py
+++ b/dev/send_email.py
@@ -188,7 +188,7 @@ class BaseParameters:
"-n",
"--name",
prompt="Your Name",
- default=lambda: os.environ.get('USER', ''),
+ default=lambda: os.environ.get("USER", ""),
show_default="Current User",
help="Name of the Release Manager",
type=click.STRING,
@@ -336,5 +336,5 @@ def announce(base_parameters, receiver_email: str):
show_message("Twitter", twitter_msg)
-if __name__ == '__main__':
+if __name__ == "__main__":
cli()
diff --git a/dev/stats/calculate_statistics_provider_testing_issues.py b/dev/stats/calculate_statistics_provider_testing_issues.py
index 4138624742..7172c2beef 100755
--- a/dev/stats/calculate_statistics_provider_testing_issues.py
+++ b/dev/stats/calculate_statistics_provider_testing_issues.py
@@ -40,7 +40,7 @@ MY_DIR_PATH = Path(os.path.dirname(__file__))
SOURCE_DIR_PATH = MY_DIR_PATH / os.pardir / os.pardir
-@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500})
+@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
...
@@ -62,7 +62,7 @@ option_github_token = click.option(
Can be generated with:
https://github.com/settings/tokens/new?description=Read%20Write%20isssues&scopes=repo"""
),
- envvar='GITHUB_TOKEN',
+ envvar="GITHUB_TOKEN",
)
diff --git a/dev/stats/get_important_pr_candidates.py b/dev/stats/get_important_pr_candidates.py
index 706f0be1d6..5e22966b35 100755
--- a/dev/stats/get_important_pr_candidates.py
+++ b/dev/stats/get_important_pr_candidates.py
@@ -51,7 +51,7 @@ option_github_token = click.option(
Can be generated with:
https://github.com/settings/tokens/new?description=Read%20issues&scopes=repo:status"""
),
- envvar='GITHUB_TOKEN',
+ envvar="GITHUB_TOKEN",
)
@@ -90,7 +90,7 @@ class PrStat:
for comment in self.pull_request.get_comments():
self._users.add(comment.user.login)
lowercase_body = comment.body.lower()
- if 'protm' in lowercase_body:
+ if "protm" in lowercase_body:
num_protm += 1
num_comments += 1
self.protm_score = num_protm
@@ -104,7 +104,7 @@ class PrStat:
for conv_comment in self.pull_request.get_issue_comments():
self._users.add(conv_comment.user.login)
lowercase_body = conv_comment.body.lower()
- if 'protm' in lowercase_body:
+ if "protm" in lowercase_body:
num_protm += 1
num_conv_comments += 1
self.protm_score = num_protm
@@ -143,7 +143,7 @@ class PrStat:
def issues(self):
"""finds issues in PR"""
if self.pull_request.body is not None:
- regex = r'(?<=closes: #|elated: #)\d{5}'
+ regex = r"(?<=closes: #|elated: #)\d{5}"
issue_strs = re.findall(regex, self.pull_request.body)
issue_ints = [eval(s) for s in issue_strs]
self.issue_nums = issue_ints
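
The "elated: #" in the regex above is deliberate, not a typo: Python lookbehinds must be fixed-width, and "elated: #" is exactly as long as "closes: #", so a single assertion matches the tail of both "related: #" and "closes: #". A quick demonstration (incidentally, int(s) would be a safer conversion than eval(s)):

    import re

    regex = r"(?<=closes: #|elated: #)\d{5}"
    body = "closes: #12345 and related: #67890"
    print(re.findall(regex, body))  # ['12345', '67890']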
@@ -296,15 +296,15 @@ class PrStat:
def __str__(self) -> str:
if self.protm_score > 0:
return (
- '[magenta]##Tagged PR## [/]'
+ "[magenta]##Tagged PR## [/]"
f"Score: {self.score:.2f}: PR{self.pull_request.number} by @{self.pull_request.user.login}: "
- f"\"{self.pull_request.title}\". "
+ f'"{self.pull_request.title}". '
f"Merged at {self.pull_request.merged_at}: {self.pull_request.html_url}"
)
else:
return (
f"Score: {self.score:.2f}: PR{self.pull_request.number} by @{self.pull_request.user.login}: "
- f"\"{self.pull_request.title}\". "
+ f'"{self.pull_request.title}". '
f"Merged at {self.pull_request.merged_at}: {self.pull_request.html_url}"
)
@@ -312,33 +312,33 @@ class PrStat:
if self.protm_score > 0:
console.print("********************* Tagged with '#protm' *********************", style="magenta")
return (
- f'-- Created at [bright_blue]{self.pull_request.created_at}[/], '
- f'merged at [bright_blue]{self.pull_request.merged_at}[/]\n'
- f'-- Label score: [green]{self.label_score}[/]\n'
- f'-- Length score: [green]{self.length_score}[/] '
- f'(body length: {self.body_length}, '
- f'comment length: {self.comment_length})\n'
- f'-- Interaction score: [green]{self.interaction_score}[/] '
- f'(users interacting: {self.num_interacting_users}, '
- f'reviews: {self.num_reviews}, '
- f'review comments: {self.num_comments}, '
- f'review reactions: {self.num_reactions}, '
- f'non-review comments: {self.num_conv_comments}, '
- f'non-review reactions: {self.num_conv_reactions}, '
- f'issue comments: {self.num_issue_comments}, '
- f'issue reactions: {self.num_issue_reactions})\n'
- f'-- Change score: [green]{self.change_score}[/] '
- f'(changed files: {self.num_changed_files}, '
- f'additions: {self.num_additions}, '
- f'deletions: {self.num_deletions})\n'
- f'-- Overall score: [red]{self.score:.2f}[/]\n'
+ f"-- Created at [bright_blue]{self.pull_request.created_at}[/], "
+ f"merged at [bright_blue]{self.pull_request.merged_at}[/]\n"
+ f"-- Label score: [green]{self.label_score}[/]\n"
+ f"-- Length score: [green]{self.length_score}[/] "
+ f"(body length: {self.body_length}, "
+ f"comment length: {self.comment_length})\n"
+ f"-- Interaction score: [green]{self.interaction_score}[/] "
+ f"(users interacting: {self.num_interacting_users}, "
+ f"reviews: {self.num_reviews}, "
+ f"review comments: {self.num_comments}, "
+ f"review reactions: {self.num_reactions}, "
+ f"non-review comments: {self.num_conv_comments}, "
+ f"non-review reactions: {self.num_conv_reactions}, "
+ f"issue comments: {self.num_issue_comments}, "
+ f"issue reactions: {self.num_issue_reactions})\n"
+ f"-- Change score: [green]{self.change_score}[/] "
+ f"(changed files: {self.num_changed_files}, "
+ f"additions: {self.num_additions}, "
+ f"deletions: {self.num_deletions})\n"
+ f"-- Overall score: [red]{self.score:.2f}[/]\n"
)
DAYS_BACK = 5
# Current (or previous during first few days of the next month)
-DEFAULT_BEGINNING_OF_MONTH = pendulum.now().subtract(days=DAYS_BACK).start_of('month')
-DEFAULT_END_OF_MONTH = DEFAULT_BEGINNING_OF_MONTH.end_of('month').add(days=1)
+DEFAULT_BEGINNING_OF_MONTH = pendulum.now().subtract(days=DAYS_BACK).start_of("month")
+DEFAULT_END_OF_MONTH = DEFAULT_BEGINNING_OF_MONTH.end_of("month").add(days=1)
MAX_PR_CANDIDATES = 500
DEFAULT_TOP_PRS = 10
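
The two pendulum constants define a month-long window that keeps pointing at the previous month during the first DAYS_BACK days of a new one. The same arithmetic in isolation:

    import pendulum

    DAYS_BACK = 5
    start = pendulum.now().subtract(days=DAYS_BACK).start_of("month")
    end = start.end_of("month").add(days=1)  # one day past month end, used as the upper bound
    print(start.date(), end.date())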
@@ -347,15 +347,15 @@ DEFAULT_TOP_PRS = 10
@click.command()
@option_github_token # TODO: this should only be required if --load isn't provided
@click.option(
- '--date-start', type=click.DateTime(formats=["%Y-%m-%d"]), default=str(DEFAULT_BEGINNING_OF_MONTH.date())
+ "--date-start", type=click.DateTime(formats=["%Y-%m-%d"]), default=str(DEFAULT_BEGINNING_OF_MONTH.date())
)
@click.option(
- '--date-end', type=click.DateTime(formats=["%Y-%m-%d"]), default=str(DEFAULT_END_OF_MONTH.date())
+ "--date-end", type=click.DateTime(formats=["%Y-%m-%d"]), default=str(DEFAULT_END_OF_MONTH.date())
)
-@click.option('--top-number', type=int, default=DEFAULT_TOP_PRS, help="The number of PRs to select")
-@click.option('--save', type=click.File("wb"), help="Save PR data to a pickle file")
-@click.option('--load', type=click.File("rb"), help="Load PR data from a file and recalculate scores")
-@click.option('--verbose', is_flag=True, help="Print scoring details")
+@click.option("--top-number", type=int, default=DEFAULT_TOP_PRS, help="The number of PRs to select")
+@click.option("--save", type=click.File("wb"), help="Save PR data to a pickle file")
+@click.option("--load", type=click.File("rb"), help="Load PR data from a file and recalculate scores")
+@click.option("--verbose", is_flag=True, help="Print scoring details")
def main(
github_token: str,
date_start: datetime,
@@ -368,7 +368,7 @@ def main(
selected_prs: list[PrStat] = []
if load:
console.print("Loading PRs from cache and recalculating scores.")
- selected_prs = pickle.load(load, encoding='bytes')
+ selected_prs = pickle.load(load, encoding="bytes")
issue_num = 0
for pr_stat in selected_prs:
issue_num += 1
@@ -385,7 +385,7 @@ def main(
console.print(f"Finding best candidate PRs between {date_start} and {date_end}.")
g = Github(github_token)
repo = g.get_repo("apache/airflow")
- pulls = repo.get_pulls(state="closed", sort="created", direction='desc')
+ pulls = repo.get_pulls(state="closed", sort="created", direction="desc")
issue_num = 0
for pr in pulls:
if not pr.merged:
@@ -415,7 +415,7 @@ def main(
selected_prs.append(pr_stat)
if issue_num == MAX_PR_CANDIDATES:
- console.print(f'[red]Reached {MAX_PR_CANDIDATES}. Stopping')
+ console.print(f"[red]Reached {MAX_PR_CANDIDATES}. Stopping")
break
console.print(f"Top {top_number} out of {issue_num} PRs:")
diff --git a/dev/system_tests/update_issue_status.py b/dev/system_tests/update_issue_status.py
index c792659cd9..34a0262170 100755
--- a/dev/system_tests/update_issue_status.py
+++ b/dev/system_tests/update_issue_status.py
@@ -47,7 +47,7 @@ option_github_token = click.option(
Can be generated with:
https://github.com/settings/tokens/new?description=Write%20issues&scopes=repo"""
),
- envvar='GITHUB_TOKEN',
+ envvar="GITHUB_TOKEN",
)
option_verbose = click.option(
@@ -73,7 +73,7 @@ option_repository = click.option(
option_labels = click.option(
"--labels",
type=str,
- default='AIP-47',
+ default="AIP-47",
help="Label to filter the issues on (coma-separated)",
)
@@ -97,7 +97,7 @@ def process_paths_from_body(body: str, dry_run: bool, verbose: bool) -> tuple[st
if line.startswith("- ["):
if verbose:
console.print(line)
- path = SOURCE_DIR_PATH / line[len("- [ ] ") :].strip().split(' ')[0]
+ path = SOURCE_DIR_PATH / line[len("- [ ] ") :].strip().split(" ")[0]
if path.exists():
count_all += 1
prefix = ""
@@ -141,7 +141,7 @@ def update_issue_status(
"""Update status of the issues regarding the AIP-47 migration."""
g = Github(github_token)
repo = g.get_repo(repository)
- issues = repo.get_issues(labels=labels.split(','), state='all')
+ issues = repo.get_issues(labels=labels.split(","), state="all")
max_issues = max_issues if max_issues is not None else issues.totalCount
total_re_added = 0
total_completed = 0
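
repo.get_issues takes the label list and state directly and returns a lazy PaginatedList; its totalCount is what the code above falls back to when no issue cap is given. A hedged PyGithub sketch (the token is a placeholder):

    from github import Github

    g = Github("<GITHUB_TOKEN>")  # placeholder; supply a real token
    repo = g.get_repo("apache/airflow")
    issues = repo.get_issues(labels="AIP-47".split(","), state="all")
    print(issues.totalCount)  # total matching issues; iteration pages lazily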
@@ -212,14 +212,14 @@ def update_issue_status(
all = per_issue_num_all[issue.id]
done = per_issue_num_done[issue.id]
console.print(
- fr" * [[yellow]{issue.title}[/]]({issue.html_url}): "
+ rf" * [[yellow]{issue.title}[/]]({issue.html_url}): "
f"{done}/{all} : {done * 100 / all:.2f}%"
)
console.print()
if completed_open_issues:
console.print("[yellow] Issues that are completed and should be closed:[/]\n")
for issue in completed_open_issues:
- console.print(fr" * [[yellow]{issue.title}[/]]({issue.html_url})")
+ console.print(rf" * [[yellow]{issue.title}[/]]({issue.html_url})")
console.print()
if not_completed_opened_issues:
console.print("[yellow] Issues that are not completed and are still opened:[/]\n")
@@ -227,14 +227,14 @@ def update_issue_status(
all = per_issue_num_all[issue.id]
done = per_issue_num_done[issue.id]
console.print(
- fr" * [[yellow]{issue.title}[/]]({issue.html_url}): "
+ rf" * [[yellow]{issue.title}[/]]({issue.html_url}): "
f"{done}/{all} : {done * 100 / all:.2f}%"
)
console.print()
if completed_closed_issues:
console.print("[green] Issues that are completed and are already closed:[/]\n")
for issue in completed_closed_issues:
- console.print(fr" * [[green]{issue.title}[/]]({issue.html_url})")
+ console.print(rf" * [[green]{issue.title}[/]]({issue.html_url})")
console.print()
console.print()
diff --git a/dev/validate_version_added_fields_in_config.py b/dev/validate_version_added_fields_in_config.py
index c3b670861b..a43e149175 100755
--- a/dev/validate_version_added_fields_in_config.py
+++ b/dev/validate_version_added_fields_in_config.py
@@ -31,32 +31,32 @@ ROOT_DIR = Path(__file__).resolve().parent / ".."
KNOWN_FALSE_DETECTIONS = {
# This option has been added in v2.0.0, but we had a mistake in the config.yml file until v2.2.0.
# https://github.com/apache/airflow/pull/17808
- ('logging', 'extra_logger_names', '2.2.0')
+ ("logging", "extra_logger_names", "2.2.0")
}
def fetch_pypi_versions() -> list[str]:
- r = requests.get('https://pypi.org/pypi/apache-airflow/json')
+ r = requests.get("https://pypi.org/pypi/apache-airflow/json")
r.raise_for_status()
- all_version = r.json()['releases'].keys()
- released_versions = [d for d in all_version if not (('rc' in d) or ('b' in d))]
+ all_version = r.json()["releases"].keys()
+ released_versions = [d for d in all_version if not (("rc" in d) or ("b" in d))]
return released_versions
@functools.lru_cache()
def fetch_config_options_for_version(version: str) -> set[tuple[str, str]]:
r = requests.get(
- f'https://raw.githubusercontent.com/apache/airflow/{version}/airflow/config_templates/config.yml'
+ f"https://raw.githubusercontent.com/apache/airflow/{version}/airflow/config_templates/config.yml"
)
r.raise_for_status()
config_sections = yaml.safe_load(r.text)
config_options = {
(
- config_section['name'],
- config_option['name'],
+ config_section["name"],
+ config_option["name"],
)
for config_section in config_sections
- for config_option in config_section['options']
+ for config_option in config_section["options"]
}
return config_options
@@ -64,9 +64,9 @@ def fetch_config_options_for_version(version: str) -> set[tuple[str, str]]:
def read_local_config_options() -> set[tuple[str, str, str]]:
config_sections = yaml.safe_load((ROOT_DIR / "airflow" / "config_templates" / "config.yml").read_text())
config_options = {
- (config_section['name'], config_option['name'], config_option['version_added'])
+ (config_section["name"], config_option["name"], config_option["version_added"])
for config_section in config_sections
- for config_option in config_section['options']
+ for config_option in config_section["options"]
}
return config_options
diff --git a/docker_tests/docker_tests_utils.py b/docker_tests/docker_tests_utils.py
index ab49a0d4f7..c6c19d9c16 100644
--- a/docker_tests/docker_tests_utils.py
+++ b/docker_tests/docker_tests_utils.py
@@ -20,7 +20,7 @@ import os
from docker_tests.command_utils import run_command
-docker_image = os.environ.get('DOCKER_IMAGE')
+docker_image = os.environ.get("DOCKER_IMAGE")
if not docker_image:
raise Exception("The DOCKER_IMAGE environment variable is required")
diff --git a/docker_tests/test_ci_image.py b/docker_tests/test_ci_image.py
index a9dab204f6..2c3fabd7d9 100644
--- a/docker_tests/test_ci_image.py
+++ b/docker_tests/test_ci_image.py
@@ -26,7 +26,7 @@ class TestPythonPackages:
def test_pip_dependencies_conflict(self):
try:
run_command(
- ["docker", "run", "--rm", "--entrypoint", "/bin/bash", docker_image, "-c", 'pip check']
+ ["docker", "run", "--rm", "--entrypoint", "/bin/bash", docker_image, "-c", "pip check"]
)
except subprocess.CalledProcessError as ex:
display_dependency_conflict_message()
diff --git a/docker_tests/test_docker_compose_quick_start.py b/docker_tests/test_docker_compose_quick_start.py
index 3d2f087f2d..6f25f62578 100644
--- a/docker_tests/test_docker_compose_quick_start.py
+++ b/docker_tests/test_docker_compose_quick_start.py
@@ -62,7 +62,7 @@ def tmp_chdir(path):
def wait_for_container(container_id: str, timeout: int = 300):
container_name = (
- subprocess.check_output(["docker", "inspect", container_id, "--format", '{{ .Name }}'])
+ subprocess.check_output(["docker", "inspect", container_id, "--format", "{{ .Name }}"])
.decode()
.strip()
)
@@ -71,11 +71,11 @@ def wait_for_container(container_id: str, timeout: int = 300):
start_time = monotonic()
while not waiting_done:
container_state = (
- subprocess.check_output(["docker", "inspect", container_id, "--format", '{{ .State.Status }}'])
+ subprocess.check_output(["docker", "inspect", container_id, "--format", "{{ .State.Status }}"])
.decode()
.strip()
)
- if container_state in ("running", 'restarting'):
+ if container_state in ("running", "restarting"):
health_status = (
subprocess.check_output(
[
@@ -117,7 +117,7 @@ def test_trigger_dag_and_wait_for_result():
)
with tempfile.TemporaryDirectory() as tmp_dir, tmp_chdir(tmp_dir), mock.patch.dict(
- 'os.environ', AIRFLOW_IMAGE_NAME=docker_image
+ "os.environ", AIRFLOW_IMAGE_NAME=docker_image
):
copyfile(str(compose_file_path), f"{tmp_dir}/docker-compose.yaml")
os.mkdir(f"{tmp_dir}/dags")
@@ -140,7 +140,7 @@ def test_trigger_dag_and_wait_for_result():
# https://github.com/docker/compose/releases/tag/v2.1.1
# https://github.com/docker/compose/pull/8777
for container_id in (
- subprocess.check_output(["docker-compose", 'ps', '-q']).decode().strip().splitlines()
+ subprocess.check_output(["docker-compose", "ps", "-q"]).decode().strip().splitlines()
):
wait_for_container(container_id)
api_request("PATCH", path=f"dags/{DAG_ID}", json={"is_paused": False})
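
wait_for_container shells out to docker inspect with Go-template format strings. A trimmed sketch of the same polling idea (container id and timeout are illustrative; health checks are omitted):

    import subprocess
    from time import monotonic, sleep

    def wait_until_running(container_id: str, timeout: int = 300) -> bool:
        start = monotonic()
        while monotonic() - start < timeout:
            state = subprocess.check_output(
                ["docker", "inspect", container_id, "--format", "{{ .State.Status }}"]
            ).decode().strip()
            if state in ("running", "restarting"):
                return True
            sleep(1)
        return False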
diff --git a/docker_tests/test_examples_of_prod_image_building.py b/docker_tests/test_examples_of_prod_image_building.py
index c759cb6713..978473e64e 100644
--- a/docker_tests/test_examples_of_prod_image_building.py
+++ b/docker_tests/test_examples_of_prod_image_building.py
@@ -33,13 +33,13 @@ DOCKER_EXAMPLES_DIR = SOURCE_ROOT / "docs" / "docker-stack" / "docker-examples"
@lru_cache(maxsize=None)
def get_latest_airflow_version_released():
- response = requests.get('https://pypi.org/pypi/apache-airflow/json')
+ response = requests.get("https://pypi.org/pypi/apache-airflow/json")
response.raise_for_status()
- return response.json()['info']['version']
+ return response.json()["info"]["version"]
@pytest.mark.skipif(
- os.environ.get('CI') == "true",
+ os.environ.get("CI") == "true",
reason="Skipping the script builds on CI! They take very long time to build.",
)
@pytest.mark.parametrize("script_file", glob.glob(f"{DOCKER_EXAMPLES_DIR}/**/*.sh", recursive=True))
@@ -53,11 +53,11 @@ def test_dockerfile_example(dockerfile):
image_name = str(rel_dockerfile_path).lower().replace("/", "-")
content = Path(dockerfile).read_text()
new_content = re.sub(
- r'FROM apache/airflow:.*', fr'FROM apache/airflow:{get_latest_airflow_version_released()}', content
+ r"FROM apache/airflow:.*", rf"FROM apache/airflow:{get_latest_airflow_version_released()}", content
)
try:
run_command(
- ["docker", "build", ".", "--tag", image_name, '-f', '-'],
+ ["docker", "build", ".", "--tag", image_name, "-f", "-"],
cwd=str(Path(dockerfile).parent),
input=new_content.encode(),
)
diff --git a/docker_tests/test_prod_image.py b/docker_tests/test_prod_image.py
index cebe45105a..f1d7fbd018 100644
--- a/docker_tests/test_prod_image.py
+++ b/docker_tests/test_prod_image.py
@@ -89,7 +89,7 @@ class TestPythonPackages:
"airflow providers list --output json", stderr=subprocess.DEVNULL, return_output=True
)
providers = json.loads(output)
- packages_installed = {d['package_name'] for d in providers}
+ packages_installed = {d["package_name"] for d in providers}
assert len(packages_installed) != 0
assert packages_to_install == packages_installed, (
@@ -108,16 +108,16 @@ class TestPythonPackages:
"amazon": ["boto3", "botocore", "watchtower"],
"async": ["gevent", "eventlet", "greenlet"],
"azure": [
- 'azure.batch',
- 'azure.cosmos',
- 'azure.datalake.store',
- 'azure.identity',
- 'azure.keyvault.secrets',
- 'azure.kusto.data',
- 'azure.mgmt.containerinstance',
- 'azure.mgmt.datalake.store',
- 'azure.mgmt.resource',
- 'azure.storage',
+ "azure.batch",
+ "azure.cosmos",
+ "azure.datalake.store",
+ "azure.identity",
+ "azure.keyvault.secrets",
+ "azure.kusto.data",
+ "azure.mgmt.containerinstance",
+ "azure.mgmt.datalake.store",
+ "azure.mgmt.resource",
+ "azure.storage",
],
"celery": ["celery", "flower", "vine"],
"cncf.kubernetes": ["kubernetes", "cryptography"],
@@ -125,35 +125,35 @@ class TestPythonPackages:
"docker": ["docker"],
"elasticsearch": ["elasticsearch", "es.elastic", "elasticsearch_dsl"],
"google": [
- 'OpenSSL',
- 'google.ads',
- 'googleapiclient',
- 'google.auth',
- 'google_auth_httplib2',
- 'google.cloud.automl',
- 'google.cloud.bigquery_datatransfer',
- 'google.cloud.bigtable',
- 'google.cloud.container',
- 'google.cloud.datacatalog',
- 'google.cloud.dataproc',
- 'google.cloud.dlp',
- 'google.cloud.kms',
- 'google.cloud.language',
- 'google.cloud.logging',
- 'google.cloud.memcache',
- 'google.cloud.monitoring',
- 'google.cloud.oslogin',
- 'google.cloud.pubsub',
- 'google.cloud.redis',
- 'google.cloud.secretmanager',
- 'google.cloud.spanner',
- 'google.cloud.speech',
- 'google.cloud.storage',
- 'google.cloud.tasks',
- 'google.cloud.texttospeech',
- 'google.cloud.translate',
- 'google.cloud.videointelligence',
- 'google.cloud.vision',
+ "OpenSSL",
+ "google.ads",
+ "googleapiclient",
+ "google.auth",
+ "google_auth_httplib2",
+ "google.cloud.automl",
+ "google.cloud.bigquery_datatransfer",
+ "google.cloud.bigtable",
+ "google.cloud.container",
+ "google.cloud.datacatalog",
+ "google.cloud.dataproc",
+ "google.cloud.dlp",
+ "google.cloud.kms",
+ "google.cloud.language",
+ "google.cloud.logging",
+ "google.cloud.memcache",
+ "google.cloud.monitoring",
+ "google.cloud.oslogin",
+ "google.cloud.pubsub",
+ "google.cloud.redis",
+ "google.cloud.secretmanager",
+ "google.cloud.spanner",
+ "google.cloud.speech",
+ "google.cloud.storage",
+ "google.cloud.tasks",
+ "google.cloud.texttospeech",
+ "google.cloud.translate",
+ "google.cloud.videointelligence",
+ "google.cloud.vision",
],
"grpc": ["grpc", "google.auth", "google_auth_httplib2"],
"hashicorp": ["hvac"],
@@ -194,7 +194,7 @@ class TestExecuteAsRoot:
def test_run_custom_python_packages_as_root(self):
with tempfile.TemporaryDirectory() as tmp_dir:
- (Path(tmp_dir) / "__init__.py").write_text('')
+ (Path(tmp_dir) / "__init__.py").write_text("")
(Path(tmp_dir) / "awesome.py").write_text('print("Awesome")')
run_command(
diff --git a/docs/build_docs.py b/docs/build_docs.py
index 4ad8b0b83b..d1fb06ccac 100755
--- a/docs/build_docs.py
+++ b/docs/build_docs.py
@@ -37,8 +37,8 @@ from docs.exts.docs_build.github_action_utils import with_group
from docs.exts.docs_build.package_filter import process_package_filters
from docs.exts.docs_build.spelling_checks import SpellingError, display_spelling_error_summary
-TEXT_RED = '\033[31m'
-TEXT_RESET = '\033[0m'
+TEXT_RED = "\033[31m"
+TEXT_RESET = "\033[0m"
if __name__ not in ("__main__", "__mp_main__"):
raise SystemExit(
@@ -53,18 +53,18 @@ Invitation link: https://s.apache.org/airflow-slack\
"""
ERRORS_ELIGIBLE_TO_REBUILD = [
- 'failed to reach any of the inventories with the following issues',
- 'toctree contains reference to nonexisting document',
- 'undefined label:',
- 'unknown document:',
- 'Error loading airflow.providers',
+ "failed to reach any of the inventories with the following issues",
+ "toctree contains reference to nonexisting document",
+ "undefined label:",
+ "unknown document:",
+ "Error loading airflow.providers",
]
-ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS', 'false') == "true"
+ON_GITHUB_ACTIONS = os.environ.get("GITHUB_ACTIONS", "false") == "true"
console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH)
-T = TypeVar('T')
+T = TypeVar("T")
def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> tuple[Iterable[T], Iterable[T]]:
@@ -100,24 +100,24 @@ def _promote_new_flags():
def _get_parser():
available_packages_list = " * " + "\n * ".join(get_available_packages())
parser = argparse.ArgumentParser(
- description='Builds documentation and runs spell checking',
+ description="Builds documentation and runs spell checking",
epilog=f"List of supported documentation packages:\n{available_packages_list}",
)
parser.formatter_class = argparse.RawTextHelpFormatter
parser.add_argument(
- '--disable-checks', dest='disable_checks', action='store_true', help='Disables extra checks'
+ "--disable-checks", dest="disable_checks", action="store_true", help="Disables extra checks"
)
parser.add_argument(
- '--disable-provider-checks',
- dest='disable_provider_checks',
- action='store_true',
- help='Disables extra checks for providers',
+ "--disable-provider-checks",
+ dest="disable_provider_checks",
+ action="store_true",
+ help="Disables extra checks for providers",
)
parser.add_argument(
- '--one-pass-only',
- dest='one_pass_only',
- action='store_true',
- help='Do not attempt multiple builds on error',
+ "--one-pass-only",
+ dest="one_pass_only",
+ action="store_true",
+ help="Do not attempt multiple builds on error",
)
parser.add_argument(
"--package-filter",
@@ -126,20 +126,20 @@ def _get_parser():
"Filter specifying for which packages the documentation is to be built. Wildcard are supported."
),
)
- parser.add_argument('--docs-only', dest='docs_only', action='store_true', help='Only build documentation')
+ parser.add_argument("--docs-only", dest="docs_only", action="store_true", help="Only build documentation")
parser.add_argument(
- '--spellcheck-only', dest='spellcheck_only', action='store_true', help='Only perform spellchecking'
+ "--spellcheck-only", dest="spellcheck_only", action="store_true", help="Only perform spellchecking"
)
parser.add_argument(
- '--for-production',
- dest='for_production',
- action='store_true',
- help='Builds documentation for official release i.e. all links point to stable version',
+ "--for-production",
+ dest="for_production",
+ action="store_true",
+ help="Builds documentation for official release i.e. all links point to stable version",
)
parser.add_argument(
"-j",
"--jobs",
- dest='jobs',
+ dest="jobs",
type=int,
default=0,
help=(
@@ -153,11 +153,11 @@ def _get_parser():
parser.add_argument(
"-v",
"--verbose",
- dest='verbose',
- action='store_true',
+ dest="verbose",
+ action="store_true",
help=(
- 'Increases the verbosity of the script i.e. always displays a full log of '
- 'the build process, not just when it encounters errors'
+ "Increases the verbosity of the script i.e. always displays a full log of "
+ "the build process, not just when it encounters errors"
),
)
@@ -413,7 +413,7 @@ def display_packages_summary(
"Count of doc build errors": len(build_errors.get(package_name, [])),
"Count of spelling errors": len(spelling_errors.get(package_name, [])),
}
- for package_name in sorted(packages_names, key=lambda k: k or '')
+ for package_name in sorted(packages_names, key=lambda k: k or "")
]
console.print("#" * 20, " Packages errors summary ", "#" * 20)
console.print(tabulate(tabular_data=tabular_data, headers="keys"))
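
tabulate's headers="keys" mode takes column names from the dict keys, which is why each summary row above is built as a dict. A tiny sketch with made-up rows:

    from tabulate import tabulate

    tabular_data = [
        {"Package name": "apache-airflow", "Doc build errors": 0, "Spelling errors": 2},
        {"Package name": "helm-chart", "Doc build errors": 1, "Spelling errors": 0},
    ]
    print(tabulate(tabular_data=tabular_data, headers="keys"))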
diff --git a/docs/conf.py b/docs/conf.py
index 38f1694e7b..f75704d9dd 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -43,23 +43,23 @@ import yaml
import airflow
from airflow.configuration import AirflowConfigParser, default_config_yaml
-sys.path.append(str(Path(__file__).parent / 'exts'))
+sys.path.append(str(Path(__file__).parent / "exts"))
from docs_build.third_party_inventories import THIRD_PARTY_INDEXES # noqa: E402
CONF_DIR = pathlib.Path(__file__).parent.absolute()
-INVENTORY_CACHE_DIR = CONF_DIR / '_inventory_cache'
+INVENTORY_CACHE_DIR = CONF_DIR / "_inventory_cache"
ROOT_DIR = CONF_DIR.parent
-FOR_PRODUCTION = os.environ.get('AIRFLOW_FOR_PRODUCTION', 'false') == 'true'
+FOR_PRODUCTION = os.environ.get("AIRFLOW_FOR_PRODUCTION", "false") == "true"
# By default (e.g. on RTD), build docs for `airflow` package
-PACKAGE_NAME = os.environ.get('AIRFLOW_PACKAGE_NAME', 'apache-airflow')
+PACKAGE_NAME = os.environ.get("AIRFLOW_PACKAGE_NAME", "apache-airflow")
PACKAGE_DIR: pathlib.Path
-if PACKAGE_NAME == 'apache-airflow':
- PACKAGE_DIR = ROOT_DIR / 'airflow'
+if PACKAGE_NAME == "apache-airflow":
+ PACKAGE_DIR = ROOT_DIR / "airflow"
PACKAGE_VERSION = airflow.__version__
SYSTEM_TESTS_DIR = None
-elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
+elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
from provider_yaml_utils import load_package_data
ALL_PROVIDER_YAMLS = load_package_data()
@@ -67,39 +67,39 @@ elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
CURRENT_PROVIDER = next(
provider_yaml
for provider_yaml in ALL_PROVIDER_YAMLS
- if provider_yaml['package-name'] == PACKAGE_NAME
+ if provider_yaml["package-name"] == PACKAGE_NAME
)
except StopIteration:
raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}")
- PACKAGE_DIR = pathlib.Path(CURRENT_PROVIDER['package-dir'])
- PACKAGE_VERSION = CURRENT_PROVIDER['versions'][0]
- SYSTEM_TESTS_DIR = CURRENT_PROVIDER['system-tests-dir']
-elif PACKAGE_NAME == 'apache-airflow-providers':
+ PACKAGE_DIR = pathlib.Path(CURRENT_PROVIDER["package-dir"])
+ PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0]
+ SYSTEM_TESTS_DIR = CURRENT_PROVIDER["system-tests-dir"]
+elif PACKAGE_NAME == "apache-airflow-providers":
from provider_yaml_utils import load_package_data
- PACKAGE_DIR = ROOT_DIR / 'airflow' / 'providers'
- PACKAGE_VERSION = 'devel'
+ PACKAGE_DIR = ROOT_DIR / "airflow" / "providers"
+ PACKAGE_VERSION = "devel"
ALL_PROVIDER_YAMLS = load_package_data()
SYSTEM_TESTS_DIR = None
-elif PACKAGE_NAME == 'helm-chart':
- PACKAGE_DIR = ROOT_DIR / 'chart'
- chart_yaml_file = PACKAGE_DIR / 'Chart.yaml'
+elif PACKAGE_NAME == "helm-chart":
+ PACKAGE_DIR = ROOT_DIR / "chart"
+ chart_yaml_file = PACKAGE_DIR / "Chart.yaml"
with chart_yaml_file.open() as chart_file:
chart_yaml_contents = yaml.safe_load(chart_file)
- PACKAGE_VERSION = chart_yaml_contents['version']
+ PACKAGE_VERSION = chart_yaml_contents["version"]
SYSTEM_TESTS_DIR = None
else:
- PACKAGE_VERSION = 'devel'
+ PACKAGE_VERSION = "devel"
SYSTEM_TESTS_DIR = None
# Adds to environment variables for easy access from other plugins like airflow_intersphinx.
-os.environ['AIRFLOW_PACKAGE_NAME'] = PACKAGE_NAME
+os.environ["AIRFLOW_PACKAGE_NAME"] = PACKAGE_NAME
# Hack to allow a piece of the code to behave differently while
# the docs are being built. The main objective was to alter the
# behavior of the utils.apply_default that was hiding function headers
-os.environ['BUILDING_AIRFLOW_DOCS'] = 'TRUE'
+os.environ["BUILDING_AIRFLOW_DOCS"] = "TRUE"
# == Sphinx configuration ======================================================
@@ -119,7 +119,7 @@ rst_epilog = f"""
.. |experimental| replace:: This is an :ref:`experimental feature <experimental>`.
"""
-smartquotes_excludes = {'builders': ['man', 'text', 'spelling']}
+smartquotes_excludes = {"builders": ["man", "text", "spelling"]}
# -- General configuration -----------------------------------------------------
@@ -129,31 +129,31 @@ smartquotes_excludes = {'builders': ['man', 'text', 'spelling']}
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- 'provider_init_hack',
- 'sphinx.ext.autodoc',
- 'sphinx.ext.viewcode',
- 'sphinxarg.ext',
- 'sphinx.ext.intersphinx',
- 'exampleinclude',
- 'docroles',
- 'removemarktransform',
- 'sphinx_copybutton',
- 'airflow_intersphinx',
+ "provider_init_hack",
+ "sphinx.ext.autodoc",
+ "sphinx.ext.viewcode",
+ "sphinxarg.ext",
+ "sphinx.ext.intersphinx",
+ "exampleinclude",
+ "docroles",
+ "removemarktransform",
+ "sphinx_copybutton",
+ "airflow_intersphinx",
"sphinxcontrib.spelling",
- 'sphinx_airflow_theme',
- 'redirects',
- 'substitution_extensions',
+ "sphinx_airflow_theme",
+ "redirects",
+ "substitution_extensions",
]
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
extensions.extend(
[
- 'sphinx_jinja',
- 'sphinx.ext.graphviz',
- 'sphinxcontrib.httpdomain',
- 'sphinxcontrib.httpdomain',
- 'extra_files_with_substitutions',
+ "sphinx_jinja",
+ "sphinx.ext.graphviz",
+ "sphinxcontrib.httpdomain",
+ "sphinxcontrib.httpdomain",
+ "extra_files_with_substitutions",
# First, generate redoc
- 'sphinxcontrib.redoc',
+ "sphinxcontrib.redoc",
# Second, update redoc script
"sphinx_script_update",
]
@@ -162,9 +162,9 @@ if PACKAGE_NAME == 'apache-airflow':
if PACKAGE_NAME == "apache-airflow-providers":
extensions.extend(
[
- 'sphinx_jinja',
- 'operators_and_hooks_ref',
- 'providers_packages_ref',
+ "sphinx_jinja",
+ "operators_and_hooks_ref",
+ "providers_packages_ref",
]
)
elif PACKAGE_NAME == "helm-chart":
@@ -173,23 +173,23 @@ elif PACKAGE_NAME == "docker-stack":
# No extra extensions
pass
else:
- extensions.append('autoapi.extension')
+ extensions.append("autoapi.extension")
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns: list[str]
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
exclude_patterns = [
# We only link to selected subpackages.
- '_api/airflow/index.rst',
- 'README.rst',
+ "_api/airflow/index.rst",
+ "README.rst",
]
-elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
+elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
extensions.extend(
[
- 'sphinx_jinja',
+ "sphinx_jinja",
]
)
- exclude_patterns = ['operators/_partials']
+ exclude_patterns = ["operators/_partials"]
else:
exclude_patterns = []
@@ -198,7 +198,7 @@ def _get_rst_filepath_from_path(filepath: pathlib.Path):
if filepath.is_dir():
result = filepath
else:
- if filepath.name == '__init__.py':
+ if filepath.name == "__init__.py":
result = filepath.parent
else:
result = filepath.with_name(filepath.stem)
@@ -207,7 +207,7 @@ def _get_rst_filepath_from_path(filepath: pathlib.Path):
return f"_api/{result.relative_to(ROOT_DIR)}"
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
# Exclude top-level packages
# do not exclude these top-level modules from the doc build:
_allowed_top_level = ("exceptions.py",)
@@ -237,13 +237,13 @@ if PACKAGE_NAME == 'apache-airflow':
for path in (root / "utils").iterdir():
if path.name not in browseable_utils:
exclude_patterns.append(_get_rst_filepath_from_path(path))
-elif PACKAGE_NAME != 'docker-stack':
+elif PACKAGE_NAME != "docker-stack":
exclude_patterns.extend(
_get_rst_filepath_from_path(f) for f in pathlib.Path(PACKAGE_DIR).glob("**/example_dags")
)
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['templates']
+templates_path = ["templates"]
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = True
@@ -253,11 +253,11 @@ keep_warnings = True
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'sphinx_airflow_theme'
+html_theme = "sphinx_airflow_theme"
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
html_title = "Airflow Documentation"
else:
html_title = f"{PACKAGE_NAME} Documentation"
@@ -273,19 +273,19 @@ html_favicon = "../airflow/www/static/pin_32.png"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-if PACKAGE_NAME in ['apache-airflow', 'helm-chart']:
- html_static_path = [f'{PACKAGE_NAME}/static']
+if PACKAGE_NAME in ["apache-airflow", "helm-chart"]:
+ html_static_path = [f"{PACKAGE_NAME}/static"]
else:
html_static_path = []
# A list of JavaScript filename. The entry must be a filename string or a
# tuple containing the filename string and the attributes dictionary. The
# filename must be relative to the html_static_path, or a full URI with
# scheme like http://example.org/script.js.
-if PACKAGE_NAME in ['apache-airflow', 'helm-chart']:
- html_js_files = ['gh-jira-links.js']
+if PACKAGE_NAME in ["apache-airflow", "helm-chart"]:
+ html_js_files = ["gh-jira-links.js"]
else:
html_js_files = []
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
html_extra_path = [
f"{ROOT_DIR}/docs/apache-airflow/howto/docker-compose/airflow.sh",
]
@@ -299,22 +299,22 @@ if PACKAGE_NAME == 'apache-airflow':
"installation/installing-from-sources.html",
]
-if PACKAGE_NAME == 'docker-stack':
+if PACKAGE_NAME == "docker-stack":
# Replace "|version|" inside ```` quotes
manual_substitutions_in_generated_html = ["build.html"]
# -- Theme configuration -------------------------------------------------------
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
- '**': [
- 'version-selector.html',
- 'searchbox.html',
- 'globaltoc.html',
+ "**": [
+ "version-selector.html",
+ "searchbox.html",
+ "globaltoc.html",
]
- if FOR_PRODUCTION and PACKAGE_VERSION != 'devel'
+ if FOR_PRODUCTION and PACKAGE_VERSION != "devel"
else [
- 'searchbox.html',
- 'globaltoc.html',
+ "searchbox.html",
+ "globaltoc.html",
]
}
@@ -325,21 +325,21 @@ html_use_index = True
html_show_copyright = False
# Theme configuration
-if PACKAGE_NAME.startswith('apache-airflow-providers-'):
+if PACKAGE_NAME.startswith("apache-airflow-providers-"):
# Only hide hidden items for providers. For Chart and Airflow we are using the approach where
# TOC is hidden but sidebar still shows the content (but we are not doing it for providers).
- html_theme_options: dict[str, Any] = {'hide_website_buttons': True, 'sidebar_includehidden': False}
+ html_theme_options: dict[str, Any] = {"hide_website_buttons": True, "sidebar_includehidden": False}
else:
- html_theme_options = {'hide_website_buttons': True, 'sidebar_includehidden': True}
+ html_theme_options = {"hide_website_buttons": True, "sidebar_includehidden": True}
if FOR_PRODUCTION:
- html_theme_options['navbar_links'] = [
- {'href': '/community/', 'text': 'Community'},
- {'href': '/meetups/', 'text': 'Meetups'},
- {'href': '/docs/', 'text': 'Documentation'},
- {'href': '/use-cases/', 'text': 'Use-cases'},
- {'href': '/announcements/', 'text': 'Announcements'},
- {'href': '/blog/', 'text': 'Blog'},
- {'href': '/ecosystem/', 'text': 'Ecosystem'},
+ html_theme_options["navbar_links"] = [
+ {"href": "/community/", "text": "Community"},
+ {"href": "/meetups/", "text": "Meetups"},
+ {"href": "/docs/", "text": "Documentation"},
+ {"href": "/use-cases/", "text": "Use-cases"},
+ {"href": "/announcements/", "text": "Announcements"},
+ {"href": "/blog/", "text": "Blog"},
+ {"href": "/ecosystem/", "text": "Ecosystem"},
]
# A dictionary of values to pass into the template engine's context for all pages.
@@ -347,7 +347,7 @@ html_context = {
# Google Analytics ID.
# For more information look at:
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/layout.html#L222-L232
- 'theme_analytics_id': 'UA-140539454-1',
+ "theme_analytics_id": "UA-140539454-1",
# Variables used to build a button for editing the source code
#
# The path is created according to the following template:
@@ -361,13 +361,13 @@ html_context = {
# https://github.com/readthedocs/sphinx_rtd_theme/blob/master/sphinx_rtd_theme/breadcrumbs.html#L45
# https://github.com/apache/airflow-site/blob/91f760c/sphinx_airflow_theme/sphinx_airflow_theme/suggest_change_button.html#L36-L40
#
- 'theme_vcs_pageview_mode': 'edit',
- 'conf_py_path': f'/docs/{PACKAGE_NAME}/',
- 'github_user': 'apache',
- 'github_repo': 'airflow',
- 'github_version': 'main',
- 'display_github': 'main',
- 'suffix': '.rst',
+ "theme_vcs_pageview_mode": "edit",
+ "conf_py_path": f"/docs/{PACKAGE_NAME}/",
+ "github_user": "apache",
+ "github_repo": "airflow",
+ "github_version": "main",
+ "display_github": "main",
+ "suffix": ".rst",
}
# == Extensions configuration ==================================================
@@ -376,7 +376,7 @@ html_context = {
# See: https://github.com/tardyp/sphinx-jinja
# Jinja context
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
deprecated_options: dict[str, dict[str, tuple[str, str, str]]] = defaultdict(dict)
for (section, key), (
(deprecated_section, deprecated_key, since_version)
@@ -402,24 +402,24 @@ if PACKAGE_NAME == 'apache-airflow':
deprecated_options[section] = {k: v for k, v in sorted(deprecated_options[section].items())}
jinja_contexts = {
- 'config_ctx': {"configs": configs, "deprecated_options": deprecated_options},
- 'quick_start_ctx': {
- 'doc_root_url': f'https://airflow.apache.org/docs/apache-airflow/{PACKAGE_VERSION}/'
+ "config_ctx": {"configs": configs, "deprecated_options": deprecated_options},
+ "quick_start_ctx": {
+ "doc_root_url": f"https://airflow.apache.org/docs/apache-airflow/{PACKAGE_VERSION}/"
if FOR_PRODUCTION
else (
- 'http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/docs/apache-airflow/latest/'
+ "http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com/docs/apache-airflow/latest/"
)
},
- 'official_download_page': {
- 'base_url': f'https://downloads.apache.org/airflow/{PACKAGE_VERSION}',
- 'closer_lua_url': f'https://www.apache.org/dyn/closer.lua/airflow/{PACKAGE_VERSION}',
- 'airflow_version': PACKAGE_VERSION,
+ "official_download_page": {
+ "base_url": f"https://downloads.apache.org/airflow/{PACKAGE_VERSION}",
+ "closer_lua_url": f"https://www.apache.org/dyn/closer.lua/airflow/{PACKAGE_VERSION}",
+ "airflow_version": PACKAGE_VERSION,
},
}
-elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
+elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
def _load_config():
- file_path = PACKAGE_DIR / 'config_templates' / 'config.yml'
+ file_path = PACKAGE_DIR / "config_templates" / "config.yml"
if not file_path.exists():
return {}
@@ -428,22 +428,22 @@ elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
config = _load_config()
jinja_contexts = {
- 'config_ctx': {"configs": config},
- 'official_download_page': {
- 'base_url': 'https://downloads.apache.org/airflow/providers',
- 'closer_lua_url': 'https://www.apache.org/dyn/closer.lua/airflow/providers',
- 'package_name': PACKAGE_NAME,
- 'package_name_underscores': PACKAGE_NAME.replace('-', '_'),
- 'package_version': PACKAGE_VERSION,
+ "config_ctx": {"configs": config},
+ "official_download_page": {
+ "base_url": "https://downloads.apache.org/airflow/providers",
+ "closer_lua_url": "https://www.apache.org/dyn/closer.lua/airflow/providers",
+ "package_name": PACKAGE_NAME,
+ "package_name_underscores": PACKAGE_NAME.replace("-", "_"),
+ "package_version": PACKAGE_VERSION,
},
}
-elif PACKAGE_NAME == 'apache-airflow-providers':
+elif PACKAGE_NAME == "apache-airflow-providers":
jinja_contexts = {
- 'official_download_page': {
- 'all_providers': ALL_PROVIDER_YAMLS,
+ "official_download_page": {
+ "all_providers": ALL_PROVIDER_YAMLS,
},
}
-elif PACKAGE_NAME == 'helm-chart':
+elif PACKAGE_NAME == "helm-chart":
def _str_representer(dumper, data):
style = "|" if "\n" in data else None # show as a block scalar if we have more than 1 line
@@ -455,7 +455,7 @@ elif PACKAGE_NAME == 'helm-chart':
if value == "":
return '""'
if value is None:
- return '~'
+ return "~"
return str(value)
def _format_examples(param_name: str, schema: dict) -> str | None:
@@ -524,11 +524,11 @@ elif PACKAGE_NAME == 'helm-chart':
jinja_contexts = {
"params_ctx": {"sections": ordered_sections},
- 'official_download_page': {
- 'base_url': 'https://downloads.apache.org/airflow/helm-chart',
- 'closer_lua_url': 'https://www.apache.org/dyn/closer.lua/airflow/helm-chart',
- 'package_name': PACKAGE_NAME,
- 'package_version': PACKAGE_VERSION,
+ "official_download_page": {
+ "base_url": "https://downloads.apache.org/airflow/helm-chart",
+ "closer_lua_url": "https://www.apache.org/dyn/closer.lua/airflow/helm-chart",
+ "package_name": PACKAGE_NAME,
+ "package_version": PACKAGE_VERSION,
},
}
@@ -539,69 +539,69 @@ elif PACKAGE_NAME == 'helm-chart':
# This value contains a list of modules to be mocked up. This is useful when some external dependencies
# are not met at build time and break the building process.
autodoc_mock_imports = [
- 'MySQLdb',
- 'adal',
- 'analytics',
- 'azure',
- 'azure.cosmos',
- 'azure.datalake',
- 'azure.kusto',
- 'azure.mgmt',
- 'boto3',
- 'botocore',
- 'bson',
- 'cassandra',
- 'celery',
- 'cloudant',
- 'cryptography',
- 'datadog',
- 'distributed',
- 'docker',
- 'google',
- 'google_auth_httplib2',
- 'googleapiclient',
- 'grpc',
- 'hdfs',
- 'httplib2',
- 'jaydebeapi',
- 'jenkins',
- 'jira',
- 'kubernetes',
- 'msrestazure',
- 'oss2',
- 'oracledb',
- 'pandas',
- 'pandas_gbq',
- 'paramiko',
- 'pinotdb',
- 'psycopg2',
- 'pydruid',
- 'pyhive',
- 'pymongo',
- 'pymssql',
- 'pysftp',
- 'qds_sdk',
- 'redis',
- 'simple_salesforce',
- 'slack_sdk',
- 'smbclient',
- 'snowflake',
- 'sqlalchemy-drill',
- 'sshtunnel',
- 'telegram',
- 'tenacity',
- 'vertica_python',
- 'winrm',
- 'zenpy',
+ "MySQLdb",
+ "adal",
+ "analytics",
+ "azure",
+ "azure.cosmos",
+ "azure.datalake",
+ "azure.kusto",
+ "azure.mgmt",
+ "boto3",
+ "botocore",
+ "bson",
+ "cassandra",
+ "celery",
+ "cloudant",
+ "cryptography",
+ "datadog",
+ "distributed",
+ "docker",
+ "google",
+ "google_auth_httplib2",
+ "googleapiclient",
+ "grpc",
+ "hdfs",
+ "httplib2",
+ "jaydebeapi",
+ "jenkins",
+ "jira",
+ "kubernetes",
+ "msrestazure",
+ "oss2",
+ "oracledb",
+ "pandas",
+ "pandas_gbq",
+ "paramiko",
+ "pinotdb",
+ "psycopg2",
+ "pydruid",
+ "pyhive",
+ "pyhive",
+ "pymongo",
+ "pymssql",
+ "pysftp",
+ "qds_sdk",
+ "redis",
+ "simple_salesforce",
+ "slack_sdk",
+ "smbclient",
+ "snowflake",
+ "sqlalchemy-drill",
+ "sshtunnel",
+ "telegram",
+ "tenacity",
+ "vertica_python",
+ "winrm",
+ "zenpy",
]
# The default options for autodoc directives. They are applied to all autodoc directives automatically.
-autodoc_default_options = {'show-inheritance': True, 'members': True}
+autodoc_default_options = {"show-inheritance": True, "members": True}
-autodoc_typehints = 'description'
-autodoc_typehints_description_target = 'documented'
-autodoc_typehints_format = 'short'
+autodoc_typehints = "description"
+autodoc_typehints_description_target = "documented"
+autodoc_typehints_format = "short"
# -- Options for sphinx.ext.intersphinx ----------------------------------------
@@ -611,52 +611,52 @@ autodoc_typehints_format = 'short'
# be linked to in this documentation.
# Inventories are only downloaded once by docs/exts/docs_build/fetch_inventories.py.
intersphinx_mapping = {
- pkg_name: (f"{THIRD_PARTY_INDEXES[pkg_name]}/", (f'{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv',))
+ pkg_name: (f"{THIRD_PARTY_INDEXES[pkg_name]}/", (f"{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv",))
for pkg_name in [
- 'boto3',
- 'celery',
- 'docker',
- 'hdfs',
- 'jinja2',
- 'mongodb',
- 'pandas',
- 'python',
- 'requests',
- 'sqlalchemy',
+ "boto3",
+ "celery",
+ "docker",
+ "hdfs",
+ "jinja2",
+ "mongodb",
+ "pandas",
+ "python",
+ "requests",
+ "sqlalchemy",
]
}
-if PACKAGE_NAME in ('apache-airflow-providers-google', 'apache-airflow'):
+if PACKAGE_NAME in ("apache-airflow-providers-google", "apache-airflow"):
intersphinx_mapping.update(
{
pkg_name: (
f"{THIRD_PARTY_INDEXES[pkg_name]}/",
- (f'{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv',),
+ (f"{INVENTORY_CACHE_DIR}/{pkg_name}/objects.inv",),
)
for pkg_name in [
- 'google-api-core',
- 'google-cloud-automl',
- 'google-cloud-bigquery',
- 'google-cloud-bigquery-datatransfer',
- 'google-cloud-bigquery-storage',
- 'google-cloud-bigtable',
- 'google-cloud-container',
- 'google-cloud-core',
- 'google-cloud-datacatalog',
- 'google-cloud-datastore',
- 'google-cloud-dlp',
- 'google-cloud-kms',
- 'google-cloud-language',
- 'google-cloud-monitoring',
- 'google-cloud-pubsub',
- 'google-cloud-redis',
- 'google-cloud-spanner',
- 'google-cloud-speech',
- 'google-cloud-storage',
- 'google-cloud-tasks',
- 'google-cloud-texttospeech',
- 'google-cloud-translate',
- 'google-cloud-videointelligence',
- 'google-cloud-vision',
+ "google-api-core",
+ "google-cloud-automl",
+ "google-cloud-bigquery",
+ "google-cloud-bigquery-datatransfer",
+ "google-cloud-bigquery-storage",
+ "google-cloud-bigtable",
+ "google-cloud-container",
+ "google-cloud-core",
+ "google-cloud-datacatalog",
+ "google-cloud-datastore",
+ "google-cloud-dlp",
+ "google-cloud-kms",
+ "google-cloud-language",
+ "google-cloud-monitoring",
+ "google-cloud-pubsub",
+ "google-cloud-redis",
+ "google-cloud-spanner",
+ "google-cloud-speech",
+ "google-cloud-storage",
+ "google-cloud-tasks",
+ "google-cloud-texttospeech",
+ "google-cloud-translate",
+ "google-cloud-videointelligence",
+ "google-cloud-vision",
]
}
)
@@ -675,41 +675,41 @@ viewcode_follow_imported_members = True
# your API documentation from.
autoapi_dirs: list[os.PathLike] = []
-if PACKAGE_NAME != 'docker-stack':
+if PACKAGE_NAME != "docker-stack":
autoapi_dirs.append(PACKAGE_DIR)
if SYSTEM_TESTS_DIR and os.path.exists(SYSTEM_TESTS_DIR):
autoapi_dirs.append(SYSTEM_TESTS_DIR)
# A directory that has user-defined templates to override our default templates.
-if PACKAGE_NAME == 'apache-airflow':
- autoapi_template_dir = 'autoapi_templates'
+if PACKAGE_NAME == "apache-airflow":
+ autoapi_template_dir = "autoapi_templates"
# A list of patterns to ignore when finding files
autoapi_ignore = [
- '*/airflow/_vendor/*',
- '*/_internal*',
- '*/node_modules/*',
- '*/migrations/*',
- '*/contrib/*',
- '**/example_sla_dag.py',
- '**/example_taskflow_api_docker_virtualenv.py',
- '**/example_dag_decorator.py',
+ "*/airflow/_vendor/*",
+ "*/_internal*",
+ "*/node_modules/*",
+ "*/migrations/*",
+ "*/contrib/*",
+ "**/example_sla_dag.py",
+ "**/example_taskflow_api_docker_virtualenv.py",
+ "**/example_dag_decorator.py",
]
-if PACKAGE_NAME == 'apache-airflow':
- autoapi_ignore.append('*/airflow/providers/*')
-elif PACKAGE_NAME == 'docker-stack':
- autoapi_ignore.append('*/airflow/providers/*')
+if PACKAGE_NAME == "apache-airflow":
+ autoapi_ignore.append("*/airflow/providers/*")
+elif PACKAGE_NAME == "docker-stack":
+ autoapi_ignore.append("*/airflow/providers/*")
else:
- autoapi_ignore.append('*/airflow/providers/cncf/kubernetes/backcompat/*')
- autoapi_ignore.append('*/example_dags/*')
+ autoapi_ignore.append("*/airflow/providers/cncf/kubernetes/backcompat/*")
+ autoapi_ignore.append("*/example_dags/*")
# Keep the AutoAPI generated files on the filesystem after the run.
# Useful for debugging.
autoapi_keep_files = True
# Relative path to output the AutoAPI files into. This can also be used to place the generated documentation
# anywhere in your documentation hierarchy.
-autoapi_root = '_api'
+autoapi_root = "_api"
# Whether to insert the generated documentation into the TOC tree. If this is False, the default AutoAPI
# index page is not generated and you will need to include the generated documentation in a
@@ -718,11 +718,11 @@ autoapi_add_toctree_entry = False
# By default autoapi will include private members -- we don't want that!
autoapi_options = [
- 'members',
- 'undoc-members',
- 'show-inheritance',
- 'show-module-summary',
- 'special-members',
+ "members",
+ "undoc-members",
+ "show-inheritance",
+ "show-module-summary",
+ "special-members",
]
suppress_warnings = [
@@ -730,37 +730,37 @@ suppress_warnings = [
]
# -- Options for ext.exampleinclude --------------------------------------------
-exampleinclude_sourceroot = os.path.abspath('..')
+exampleinclude_sourceroot = os.path.abspath("..")
# -- Options for ext.redirects -------------------------------------------------
-redirects_file = 'redirects.txt'
+redirects_file = "redirects.txt"
# -- Options for sphinxcontrib-spelling ----------------------------------------
-spelling_word_list_filename = [os.path.join(CONF_DIR, 'spelling_wordlist.txt')]
-if PACKAGE_NAME == 'apache-airflow':
- spelling_exclude_patterns = ['project.rst', 'changelog.rst']
-if PACKAGE_NAME == 'helm-chart':
- spelling_exclude_patterns = ['changelog.rst']
+spelling_word_list_filename = [os.path.join(CONF_DIR, "spelling_wordlist.txt")]
+if PACKAGE_NAME == "apache-airflow":
+ spelling_exclude_patterns = ["project.rst", "changelog.rst"]
+if PACKAGE_NAME == "helm-chart":
+ spelling_exclude_patterns = ["changelog.rst"]
spelling_ignore_contributor_names = False
spelling_ignore_importable_modules = True
-graphviz_output_format = 'svg'
+graphviz_output_format = "svg"
# -- Options for sphinxcontrib.redoc -------------------------------------------
# See: https://sphinxcontrib-redoc.readthedocs.io/en/stable/
-if PACKAGE_NAME == 'apache-airflow':
+if PACKAGE_NAME == "apache-airflow":
OPENAPI_FILE = os.path.join(
os.path.dirname(__file__), "..", "airflow", "api_connexion", "openapi", "v1.yaml"
)
redoc = [
{
- 'name': 'Airflow REST API',
- 'page': 'stable-rest-api-ref',
- 'spec': OPENAPI_FILE,
- 'opts': {
- 'hide-hostname': True,
- 'no-auto-auth': True,
+ "name": "Airflow REST API",
+ "page": "stable-rest-api-ref",
+ "spec": OPENAPI_FILE,
+ "opts": {
+ "hide-hostname": True,
+ "no-auto-auth": True,
},
},
]
@@ -776,5 +776,5 @@ def skip_util_classes(app, what, name, obj, skip, options):
def setup(sphinx):
- if 'autoapi.extension' in extensions:
+ if "autoapi.extension" in extensions:
sphinx.connect("autoapi-skip-member", skip_util_classes)
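The quote changes throughout this file are mechanical: Black's string normalization rewrites single-quoted literals to double quotes, preferring whichever quote style needs the fewest escapes. A minimal sketch of the behaviour, assuming Black's Python API (black.format_str and black.Mode):

    import black

    src = "name = 'airflow'\n"

    # With normalization (Black's default) the literal becomes "airflow".
    print(black.format_str(src, mode=black.Mode()), end="")

    # string_normalization=False reproduces the old --skip-string-normalization
    # behaviour that this commit drops: the single quotes survive.
    print(black.format_str(src, mode=black.Mode(string_normalization=False)), end="")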
diff --git a/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py b/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py
index b00709c42a..54f59331e0 100644
--- a/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py
+++ b/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py
@@ -29,19 +29,19 @@ from airflow.operators.empty import EmptyOperator
now = pendulum.now(tz="UTC")
now_to_the_hour = (now - datetime.timedelta(hours=3)).replace(minute=0, second=0, microsecond=0)
START_DATE = now_to_the_hour
-DAG_NAME = 'test_dag_v1'
+DAG_NAME = "test_dag_v1"
dag = DAG(
DAG_NAME,
- schedule='*/10 * * * *',
- default_args={'depends_on_past': True},
+ schedule="*/10 * * * *",
+ default_args={"depends_on_past": True},
start_date=pendulum.datetime(2021, 1, 1, tz="UTC"),
catchup=False,
)
-run_this_1 = EmptyOperator(task_id='run_this_1', dag=dag)
-run_this_2 = EmptyOperator(task_id='run_this_2', dag=dag)
+run_this_1 = EmptyOperator(task_id="run_this_1", dag=dag)
+run_this_2 = EmptyOperator(task_id="run_this_2", dag=dag)
run_this_2.set_upstream(run_this_1)
-run_this_3 = EmptyOperator(task_id='run_this_3', dag=dag)
+run_this_3 = EmptyOperator(task_id="run_this_3", dag=dag)
run_this_3.set_upstream(run_this_2)
# [END dag]
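set_upstream is the method form of Airflow's bitshift dependency operators; the same chain can be written more idiomatically with >>. A small equivalent sketch of the dependencies declared above:

    # Equivalent to the two set_upstream calls above: run_this_1 runs first,
    # then run_this_2, then run_this_3.
    run_this_1 >> run_this_2 >> run_this_3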
diff --git a/docs/exts/airflow_intersphinx.py b/docs/exts/airflow_intersphinx.py
index 0ccf46151b..b9255d2e95 100644
--- a/docs/exts/airflow_intersphinx.py
+++ b/docs/exts/airflow_intersphinx.py
@@ -25,14 +25,14 @@ from sphinx.application import Sphinx
CURRENT_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
-DOCS_DIR = os.path.join(ROOT_DIR, 'docs')
-DOCS_PROVIDER_DIR = os.path.join(ROOT_DIR, 'docs')
+DOCS_DIR = os.path.join(ROOT_DIR, "docs")
+DOCS_PROVIDER_DIR = os.path.join(ROOT_DIR, "docs")
def _create_init_py(app, config):
del app
# del config
- intersphinx_mapping = getattr(config, 'intersphinx_mapping', None) or {}
+ intersphinx_mapping = getattr(config, "intersphinx_mapping", None) or {}
providers_mapping = _generate_provider_intersphinx_mapping()
intersphinx_mapping.update(providers_mapping)
@@ -42,17 +42,17 @@ def _create_init_py(app, config):
def _generate_provider_intersphinx_mapping():
airflow_mapping = {}
- for_production = os.environ.get('AIRFLOW_FOR_PRODUCTION', 'false') == 'true'
- current_version = 'stable' if for_production else 'latest'
+ for_production = os.environ.get("AIRFLOW_FOR_PRODUCTION", "false") == "true"
+ current_version = "stable" if for_production else "latest"
for provider in load_package_data():
- package_name = provider['package-name']
- if os.environ.get('AIRFLOW_PACKAGE_NAME') == package_name:
+ package_name = provider["package-name"]
+ if os.environ.get("AIRFLOW_PACKAGE_NAME") == package_name:
continue
- provider_base_url = f'/docs/{package_name}/{current_version}/'
- doc_inventory = f'{DOCS_DIR}/_build/docs/{package_name}/{current_version}/objects.inv'
- cache_inventory = f'{DOCS_DIR}/_inventory_cache/{package_name}/objects.inv'
+ provider_base_url = f"/docs/{package_name}/{current_version}/"
+ doc_inventory = f"{DOCS_DIR}/_build/docs/{package_name}/{current_version}/objects.inv"
+ cache_inventory = f"{DOCS_DIR}/_inventory_cache/{package_name}/objects.inv"
# Skip adding the mapping if the path does not exist
if not os.path.exists(doc_inventory) and not os.path.exists(cache_inventory):
@@ -63,26 +63,26 @@ def _generate_provider_intersphinx_mapping():
provider_base_url,
(doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
)
- for pkg_name in ["apache-airflow", 'helm-chart']:
- if os.environ.get('AIRFLOW_PACKAGE_NAME') == pkg_name:
+ for pkg_name in ["apache-airflow", "helm-chart"]:
+ if os.environ.get("AIRFLOW_PACKAGE_NAME") == pkg_name:
continue
- doc_inventory = f'{DOCS_DIR}/_build/docs/{pkg_name}/{current_version}/objects.inv'
- cache_inventory = f'{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv'
+ doc_inventory = f"{DOCS_DIR}/_build/docs/{pkg_name}/{current_version}/objects.inv"
+ cache_inventory = f"{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv"
airflow_mapping[pkg_name] = (
# base URI
f'/docs/{pkg_name}/{"stable" if for_production else "latest"}/',
(doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
)
- for pkg_name in ['apache-airflow-providers', 'docker-stack']:
- if os.environ.get('AIRFLOW_PACKAGE_NAME') == pkg_name:
+ for pkg_name in ["apache-airflow-providers", "docker-stack"]:
+ if os.environ.get("AIRFLOW_PACKAGE_NAME") == pkg_name:
continue
- doc_inventory = f'{DOCS_DIR}/_build/docs/{pkg_name}/objects.inv'
- cache_inventory = f'{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv'
+ doc_inventory = f"{DOCS_DIR}/_build/docs/{pkg_name}/objects.inv"
+ cache_inventory = f"{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv"
airflow_mapping[pkg_name] = (
# base URI
- f'/docs/{pkg_name}/',
+ f"/docs/{pkg_name}/",
(doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
)
@@ -112,7 +112,7 @@ if __name__ == "__main__":
user_agent = None
class _MockApp:
- srcdir = ''
+ srcdir = ""
config = _MockConfig()
def warn(self, msg: str) -> None:
@@ -135,11 +135,11 @@ if __name__ == "__main__":
return inv_dict
def domain_and_object_type_to_role(domain: str, object_type: str) -> str:
- if domain == 'py':
+ if domain == "py":
from sphinx.domains.python import PythonDomain
role_name = PythonDomain.object_types[object_type].roles[0]
- elif domain == 'std':
+ elif domain == "std":
from sphinx.domains.std import StandardDomain
role_name = StandardDomain.object_types[object_type].roles[0]
@@ -158,7 +158,7 @@ if __name__ == "__main__":
except ValueError as exc:
print(exc.args[0] % exc.args[1:])
except Exception as exc:
- print(f'Unknown error: {exc!r}')
+ print(f"Unknown error: {exc!r}")
provider_mapping = _generate_provider_intersphinx_mapping()
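Each entry the function produces follows the standard intersphinx_mapping shape: name -> (base URI, (inventory location,)). A sketch of one resulting entry, with a hypothetical provider name and illustrative paths:

    # Hypothetical entry for one provider package (paths are illustrative only).
    airflow_mapping = {
        "apache-airflow-providers-http": (
            "/docs/apache-airflow-providers-http/stable/",  # base URI
            ("docs/_inventory_cache/apache-airflow-providers-http/objects.inv",),
        ),
    }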
diff --git a/docs/exts/docs_build/code_utils.py b/docs/exts/docs_build/code_utils.py
index e6a84553d7..8bd435af82 100644
--- a/docs/exts/docs_build/code_utils.py
+++ b/docs/exts/docs_build/code_utils.py
@@ -29,7 +29,7 @@ DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs")
AIRFLOW_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow")
ALL_PROVIDER_YAMLS = load_package_data()
-AIRFLOW_SITE_DIR: str = os.environ.get('AIRFLOW_SITE_DIRECTORY') or ''
+AIRFLOW_SITE_DIR: str = os.environ.get("AIRFLOW_SITE_DIRECTORY") or ""
PROCESS_TIMEOUT = 8 * 60 # 480 seconds
CONSOLE_WIDTH = 180
diff --git a/docs/exts/docs_build/dev_index_generator.py b/docs/exts/docs_build/dev_index_generator.py
index 2b127cc0ef..f423ed20f7 100644
--- a/docs/exts/docs_build/dev_index_generator.py
+++ b/docs/exts/docs_build/dev_index_generator.py
@@ -27,7 +27,7 @@ from docs.exts.provider_yaml_utils import load_package_data
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
-BUILD_DIR = os.path.abspath(os.path.join(DOCS_DIR, '_build'))
+BUILD_DIR = os.path.abspath(os.path.join(DOCS_DIR, "_build"))
ALL_PROVIDER_YAMLS = load_package_data()
@@ -51,14 +51,14 @@ def _render_content():
current_provider = next(
provider_yaml
for provider_yaml in ALL_PROVIDER_YAMLS
- if provider_yaml['package-name'] == package_name
+ if provider_yaml["package-name"] == package_name
)
providers.append(current_provider)
except StopIteration:
raise Exception(f"Could not find provider.yaml file for package: {package_name}")
content = _render_template(
- 'dev_index_template.html.jinja2', providers=sorted(providers, key=lambda k: k['package-name'])
+ "dev_index_template.html.jinja2", providers=sorted(providers, key=lambda k: k["package-name"])
)
return content
@@ -74,8 +74,8 @@ def generate_index(out_file: str) -> None:
output_file.write(content)
-if __name__ == '__main__':
+if __name__ == "__main__":
parser = argparse.ArgumentParser()
- parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
+ parser.add_argument("outfile", nargs="?", type=argparse.FileType("w"), default=sys.stdout)
args = parser.parse_args()
args.outfile.write(_render_content())
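argparse.FileType("w") opens the positional argument as a writable file at parse time, and nargs="?" with default=sys.stdout makes it optional. A quick illustration of the parser above:

    import argparse
    import sys

    parser = argparse.ArgumentParser()
    parser.add_argument("outfile", nargs="?", type=argparse.FileType("w"), default=sys.stdout)

    args = parser.parse_args([])             # no argument -> args.outfile is sys.stdout
    args = parser.parse_args(["index.html"])  # argument -> an open, writable file object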
diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py
index 362472c0f2..d6b01d7239 100644
--- a/docs/exts/docs_build/docs_builder.py
+++ b/docs/exts/docs_build/docs_builder.py
@@ -60,7 +60,7 @@ class AirflowDocsBuilder:
"""Is current documentation package versioned?"""
# Disable versioning. This documentation does not apply to any released product and we can update
# it as needed, i.e. with each new package of providers.
- return self.package_name not in ('apache-airflow-providers', 'docker-stack')
+ return self.package_name not in ("apache-airflow-providers", "docker-stack")
@property
def _build_dir(self) -> str:
@@ -94,14 +94,14 @@ class AirflowDocsBuilder:
def _current_version(self):
if not self.is_versioned:
raise Exception("This documentation package is not versioned")
- if self.package_name == 'apache-airflow':
+ if self.package_name == "apache-airflow":
from airflow.version import version as airflow_version
return airflow_version
- if self.package_name.startswith('apache-airflow-providers-'):
- provider = next(p for p in ALL_PROVIDER_YAMLS if p['package-name'] == self.package_name)
- return provider['versions'][0]
- if self.package_name == 'helm-chart':
+ if self.package_name.startswith("apache-airflow-providers-"):
+ provider = next(p for p in ALL_PROVIDER_YAMLS if p["package-name"] == self.package_name)
+ return provider["versions"][0]
+ if self.package_name == "helm-chart":
return chart_version()
raise Exception(f"Unsupported package: {self.package_name}")
@@ -153,9 +153,9 @@ class AirflowDocsBuilder:
]
env = os.environ.copy()
- env['AIRFLOW_PACKAGE_NAME'] = self.package_name
+ env["AIRFLOW_PACKAGE_NAME"] = self.package_name
if self.for_production:
- env['AIRFLOW_FOR_PRODUCTION'] = 'true'
+ env["AIRFLOW_FOR_PRODUCTION"] = "true"
if verbose:
console.print(
f"[info]{self.package_name:60}:[/] Executing cmd: ",
@@ -228,9 +228,9 @@ class AirflowDocsBuilder:
self._build_dir, # path to output directory
]
env = os.environ.copy()
- env['AIRFLOW_PACKAGE_NAME'] = self.package_name
+ env["AIRFLOW_PACKAGE_NAME"] = self.package_name
if self.for_production:
- env['AIRFLOW_FOR_PRODUCTION'] = 'true'
+ env["AIRFLOW_FOR_PRODUCTION"] = "true"
if verbose:
console.print(
f"[info]{self.package_name:60}:[/] Executing cmd: ",
@@ -298,7 +298,7 @@ class AirflowDocsBuilder:
def get_available_providers_packages():
"""Get list of all available providers packages to build."""
- return [provider['package-name'] for provider in ALL_PROVIDER_YAMLS]
+ return [provider["package-name"] for provider in ALL_PROVIDER_YAMLS]
def get_available_packages():
diff --git a/docs/exts/docs_build/errors.py b/docs/exts/docs_build/errors.py
index 8403ad94bd..187b89c15a 100644
--- a/docs/exts/docs_build/errors.py
+++ b/docs/exts/docs_build/errors.py
@@ -48,8 +48,8 @@ class DocBuildError(NamedTuple):
return not self == other
def __lt__(self, right):
- file_path_a = self.file_path or ''
- file_path_b = right.file_path or ''
+ file_path_a = self.file_path or ""
+ file_path_b = right.file_path or ""
line_no_a = self.line_no or 0
line_no_b = right.line_no or 0
left = (file_path_a, line_no_a, self.message)
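The __lt__ pattern here normalizes optional fields to comparable defaults and then relies on Python's lexicographic tuple comparison, which is what lets sorted() order these NamedTuples. A toy demonstration:

    # Tuples compare element by element, so (file, line, message) sorts errors
    # first by file path, then by line number, then by message text.
    errors = [("docs/b.rst", 10, "z"), ("docs/a.rst", 99, "a"), ("docs/a.rst", 3, "m")]
    print(sorted(errors))
    # [('docs/a.rst', 3, 'm'), ('docs/a.rst', 99, 'a'), ('docs/b.rst', 10, 'z')]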
diff --git a/docs/exts/docs_build/fetch_inventories.py b/docs/exts/docs_build/fetch_inventories.py
index ccf50a99e0..a104cadf77 100644
--- a/docs/exts/docs_build/fetch_inventories.py
+++ b/docs/exts/docs_build/fetch_inventories.py
@@ -36,9 +36,9 @@ from docs.exts.docs_build.third_party_inventories import THIRD_PARTY_INDEXES
CURRENT_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir, os.pardir))
-DOCS_DIR = os.path.join(ROOT_DIR, 'docs')
-CACHE_DIR = os.path.join(DOCS_DIR, '_inventory_cache')
-EXPIRATION_DATE_PATH = os.path.join(DOCS_DIR, '_inventory_cache', "expiration-date")
+DOCS_DIR = os.path.join(ROOT_DIR, "docs")
+CACHE_DIR = os.path.join(DOCS_DIR, "_inventory_cache")
+EXPIRATION_DATE_PATH = os.path.join(DOCS_DIR, "_inventory_cache", "expiration-date")
S3_DOC_URL = "http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com"
S3_DOC_URL_VERSIONED = S3_DOC_URL + "/docs/{package_name}/latest/objects.inv"
@@ -62,7 +62,7 @@ def _fetch_file(session: requests.Session, package_name: str, url: str, path: st
return package_name, False
os.makedirs(os.path.dirname(path), exist_ok=True)
- with open(path, 'wb') as f:
+ with open(path, "wb") as f:
response.raw.decode_content = True
shutil.copyfileobj(response.raw, f)
print(f"Fetched inventory: {url}")
@@ -86,30 +86,30 @@ def fetch_inventories():
(
pkg_name,
S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
- f'{CACHE_DIR}/{pkg_name}/objects.inv',
+ f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
)
- for pkg_name in ['apache-airflow', 'helm-chart']:
+ for pkg_name in ["apache-airflow", "helm-chart"]:
to_download.append(
(
pkg_name,
S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
- f'{CACHE_DIR}/{pkg_name}/objects.inv',
+ f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
)
- for pkg_name in ['apache-airflow-providers', 'docker-stack']:
+ for pkg_name in ["apache-airflow-providers", "docker-stack"]:
to_download.append(
(
pkg_name,
S3_DOC_URL_NON_VERSIONED.format(package_name=pkg_name),
- f'{CACHE_DIR}/{pkg_name}/objects.inv',
+ f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
)
to_download.extend(
(
pkg_name,
f"{doc_url}/objects.inv",
- f'{CACHE_DIR}/{pkg_name}/objects.inv',
+ f"{CACHE_DIR}/{pkg_name}/objects.inv",
)
for pkg_name, doc_url in THIRD_PARTY_INDEXES.items()
)
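Every element appended to to_download is a (package name, inventory URL, cache path) triple that the fetcher later unpacks into _fetch_file calls. One hypothetical entry, with illustrative values:

    # Hypothetical entry; the real list is built from the S3 URL templates above.
    entry = (
        "apache-airflow",                                                # package name
        "http://apache-airflow-docs.s3-website.eu-central-1.amazonaws.com"
        "/docs/apache-airflow/latest/objects.inv",                       # inventory URL
        "docs/_inventory_cache/apache-airflow/objects.inv",              # local cache path
    )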
diff --git a/docs/exts/docs_build/github_action_utils.py b/docs/exts/docs_build/github_action_utils.py
index 37e9ba24ad..978e8c86d1 100644
--- a/docs/exts/docs_build/github_action_utils.py
+++ b/docs/exts/docs_build/github_action_utils.py
@@ -29,7 +29,7 @@ def with_group(title):
For more information, see:
https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines
"""
- if os.environ.get('GITHUB_ACTIONS', 'false') != "true":
+ if os.environ.get("GITHUB_ACTIONS", "false") != "true":
print("#" * 20, title, "#" * 20)
yield
return
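with_group is a context manager: on a developer machine it just prints a "#" banner around the wrapped output, while the CI branch (suppressed in this hunk) emits GitHub's log-grouping markers per the linked docs. Usage is the same either way; a minimal sketch:

    # Collapses the wrapped output into one group in the GitHub Actions log;
    # locally it just prints a "#"-banner with the title.
    with with_group("Building apache-airflow docs"):
        print("step 1: fetch inventories")
        print("step 2: run sphinx-build")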
diff --git a/docs/exts/docs_build/lint_checks.py b/docs/exts/docs_build/lint_checks.py
index 1021c1c085..b77b47f03f 100644
--- a/docs/exts/docs_build/lint_checks.py
+++ b/docs/exts/docs_build/lint_checks.py
@@ -275,7 +275,7 @@ def check_example_dags_in_provider_tocs() -> list[DocBuildError]:
build_errors = []
for provider in ALL_PROVIDER_YAMLS:
- example_dags_dirs = list(find_example_dags(provider['package-dir']))
+ example_dags_dirs = list(find_example_dags(provider["package-dir"]))
if not example_dags_dirs:
continue
doc_file_path = f"{DOCS_DIR}/{provider['package-name']}/index.rst"
diff --git a/docs/exts/docs_build/spelling_checks.py b/docs/exts/docs_build/spelling_checks.py
index bac36f3e7d..bbaa9fa5dd 100644
--- a/docs/exts/docs_build/spelling_checks.py
+++ b/docs/exts/docs_build/spelling_checks.py
@@ -64,12 +64,12 @@ class SpellingError(NamedTuple):
return not self == other
def __lt__(self, other):
- file_path_a = self.file_path or ''
- file_path_b = other.file_path or ''
+ file_path_a = self.file_path or ""
+ file_path_b = other.file_path or ""
line_no_a = self.line_no or 0
line_no_b = other.line_no or 0
- context_line_a = self.context_line or ''
- context_line_b = other.context_line or ''
+ context_line_a = self.context_line or ""
+ context_line_b = other.context_line or ""
left = (file_path_a, line_no_a, context_line_a, self.spelling, self.message)
right = (
file_path_b,
@@ -102,7 +102,7 @@ def parse_spelling_warnings(warning_text: str, docs_dir: str) -> list[SpellingEr
sphinx_spelling_errors.append(
SpellingError(
file_path=os.path.join(docs_dir, warning_parts[0]),
- line_no=int(warning_parts[1]) if warning_parts[1] not in ('None', '') else None,
+ line_no=int(warning_parts[1]) if warning_parts[1] not in ("None", "") else None,
spelling=warning_parts[2],
suggestion=warning_parts[3] if warning_parts[3] else None,
context_line=warning_parts[4],
diff --git a/docs/exts/docs_build/third_party_inventories.py b/docs/exts/docs_build/third_party_inventories.py
index 9fa8451601..7ac33a5333 100644
--- a/docs/exts/docs_build/third_party_inventories.py
+++ b/docs/exts/docs_build/third_party_inventories.py
@@ -17,38 +17,38 @@
from __future__ import annotations
THIRD_PARTY_INDEXES = {
- 'boto3': 'https://boto3.amazonaws.com/v1/documentation/api/latest',
- 'celery': 'https://docs.celeryq.dev/en/stable/',
- 'docker': 'https://docker-py.readthedocs.io/en/stable',
- 'hdfs': 'https://hdfscli.readthedocs.io/en/latest',
- 'jinja2': 'https://jinja.palletsprojects.com/en/2.11.x',
- 'mongodb': 'https://pymongo.readthedocs.io/en/3.11.3',
- 'pandas': 'https://pandas.pydata.org/pandas-docs/stable',
- 'python': 'https://docs.python.org/3',
- 'requests': 'https://requests.readthedocs.io/en/stable',
- 'sqlalchemy': 'https://docs.sqlalchemy.org/en/latest',
- 'google-api-core': 'https://googleapis.dev/python/google-api-core/latest',
- 'google-cloud-automl': 'https://googleapis.dev/python/automl/latest',
- 'google-cloud-bigquery': 'https://googleapis.dev/python/bigquery/latest',
- 'google-cloud-bigquery-datatransfer': 'https://googleapis.dev/python/bigquerydatatransfer/latest',
- 'google-cloud-bigquery-storage': 'https://googleapis.dev/python/bigquerystorage/latest',
- 'google-cloud-bigtable': 'https://googleapis.dev/python/bigtable/latest',
- 'google-cloud-container': 'https://googleapis.dev/python/container/latest',
- 'google-cloud-core': 'https://googleapis.dev/python/google-cloud-core/latest',
- 'google-cloud-datacatalog': 'https://googleapis.dev/python/datacatalog/latest',
- 'google-cloud-datastore': 'https://googleapis.dev/python/datastore/latest',
- 'google-cloud-dlp': 'https://googleapis.dev/python/dlp/latest',
- 'google-cloud-kms': 'https://googleapis.dev/python/cloudkms/latest',
- 'google-cloud-language': 'https://googleapis.dev/python/language/latest',
- 'google-cloud-monitoring': 'https://googleapis.dev/python/monitoring/latest',
- 'google-cloud-pubsub': 'https://googleapis.dev/python/pubsub/latest',
- 'google-cloud-redis': 'https://googleapis.dev/python/redis/latest',
- 'google-cloud-spanner': 'https://googleapis.dev/python/spanner/latest',
- 'google-cloud-speech': 'https://googleapis.dev/python/speech/latest',
- 'google-cloud-storage': 'https://googleapis.dev/python/storage/latest',
- 'google-cloud-tasks': 'https://googleapis.dev/python/cloudtasks/latest',
- 'google-cloud-texttospeech': 'https://googleapis.dev/python/texttospeech/latest',
- 'google-cloud-translate': 'https://googleapis.dev/python/translation/latest',
- 'google-cloud-videointelligence': 'https://googleapis.dev/python/videointelligence/latest',
- 'google-cloud-vision': 'https://googleapis.dev/python/vision/latest',
+ "boto3": "https://boto3.amazonaws.com/v1/documentation/api/latest",
+ "celery": "https://docs.celeryq.dev/en/stable/",
+ "docker": "https://docker-py.readthedocs.io/en/stable",
+ "hdfs": "https://hdfscli.readthedocs.io/en/latest",
+ "jinja2": "https://jinja.palletsprojects.com/en/2.11.x",
+ "mongodb": "https://pymongo.readthedocs.io/en/3.11.3",
+ "pandas": "https://pandas.pydata.org/pandas-docs/stable",
+ "python": "https://docs.python.org/3",
+ "requests": "https://requests.readthedocs.io/en/stable",
+ "sqlalchemy": "https://docs.sqlalchemy.org/en/latest",
+ "google-api-core": "https://googleapis.dev/python/google-api-core/latest",
+ "google-cloud-automl": "https://googleapis.dev/python/automl/latest",
+ "google-cloud-bigquery": "https://googleapis.dev/python/bigquery/latest",
+ "google-cloud-bigquery-datatransfer": "https://googleapis.dev/python/bigquerydatatransfer/latest",
+ "google-cloud-bigquery-storage": "https://googleapis.dev/python/bigquerystorage/latest",
+ "google-cloud-bigtable": "https://googleapis.dev/python/bigtable/latest",
+ "google-cloud-container": "https://googleapis.dev/python/container/latest",
+ "google-cloud-core": "https://googleapis.dev/python/google-cloud-core/latest",
+ "google-cloud-datacatalog": "https://googleapis.dev/python/datacatalog/latest",
+ "google-cloud-datastore": "https://googleapis.dev/python/datastore/latest",
+ "google-cloud-dlp": "https://googleapis.dev/python/dlp/latest",
+ "google-cloud-kms": "https://googleapis.dev/python/cloudkms/latest",
+ "google-cloud-language": "https://googleapis.dev/python/language/latest",
+ "google-cloud-monitoring": "https://googleapis.dev/python/monitoring/latest",
+ "google-cloud-pubsub": "https://googleapis.dev/python/pubsub/latest",
+ "google-cloud-redis": "https://googleapis.dev/python/redis/latest",
+ "google-cloud-spanner": "https://googleapis.dev/python/spanner/latest",
+ "google-cloud-speech": "https://googleapis.dev/python/speech/latest",
+ "google-cloud-storage": "https://googleapis.dev/python/storage/latest",
+ "google-cloud-tasks": "https://googleapis.dev/python/cloudtasks/latest",
+ "google-cloud-texttospeech": "https://googleapis.dev/python/texttospeech/latest",
+ "google-cloud-translate": "https://googleapis.dev/python/translation/latest",
+ "google-cloud-videointelligence": "https://googleapis.dev/python/videointelligence/latest",
+ "google-cloud-vision": "https://googleapis.dev/python/vision/latest",
}
diff --git a/docs/exts/exampleinclude.py b/docs/exts/exampleinclude.py
index 6832fe7ddc..7cc6ca8f81 100644
--- a/docs/exts/exampleinclude.py
+++ b/docs/exts/exampleinclude.py
@@ -151,7 +151,7 @@ def register_source(app, env, modname):
analyzer = ModuleAnalyzer.for_module(modname)
except Exception as ex:
logger.info(
- "Module \"%s\" could not be loaded. Full source will not be available. \"%s\"", modname, ex
+ 'Module "%s" could not be loaded. Full source will not be available. "%s"', modname, ex
)
# We cannot use regular warnings or exception methods because those warnings are interpreted
# by the running Python process and converted into "real" warnings, so we need to print the
@@ -249,5 +249,5 @@ def setup(app):
app.add_config_value("exampleinclude_sourceroot", None, "env")
if not airflow_theme_is_available:
# Sphinx airflow theme has its own styles.
- app.add_css_file('exampleinclude.css')
+ app.add_css_file("exampleinclude.css")
return {"version": "builtin", "parallel_read_safe": False, "parallel_write_safe": False}
diff --git a/docs/exts/extra_files_with_substitutions.py b/docs/exts/extra_files_with_substitutions.py
index e3e3f2d6b1..5cdaadd610 100644
--- a/docs/exts/extra_files_with_substitutions.py
+++ b/docs/exts/extra_files_with_substitutions.py
@@ -31,7 +31,7 @@ def copy_docker_compose(app, exception):
with open(path) as file:
with open(os.path.join(app.outdir, os.path.basename(path)), "w") as output_file:
for line in file:
- output_file.write(line.replace('|version|', app.config.version))
+ output_file.write(line.replace("|version|", app.config.version))
# Replace `|version|` in the installation files that requires manual substitutions (in links)
for path in app.config.manual_substitutions_in_generated_html:
@@ -41,16 +41,16 @@ def copy_docker_compose(app, exception):
os.path.join(app.outdir, os.path.dirname(path), os.path.basename(path)), "wt"
) as output_file:
for line in content:
- output_file.write(line.replace('|version|', app.config.version))
+ output_file.write(line.replace("|version|", app.config.version))
def setup(app):
"""Setup plugin"""
app.connect("build-finished", copy_docker_compose)
- app.add_config_value("html_extra_with_substitutions", [], '[str]')
- app.add_config_value("manual_substitutions_in_generated_html", [], '[str]')
+ app.add_config_value("html_extra_with_substitutions", [], "[str]")
+ app.add_config_value("manual_substitutions_in_generated_html", [], "[str]")
return {
- 'parallel_write_safe': True,
+ "parallel_write_safe": True,
}
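The substitution is a plain textual replace performed line by line on the already-rendered HTML. A minimal sketch with a hypothetical version string:

    # Hypothetical line from a generated HTML file; "2.4.2" stands in for
    # whatever app.config.version holds at build time.
    line = '<a href="https://airflow.apache.org/docs/apache-airflow/|version|/">docs</a>\n'
    print(line.replace("|version|", "2.4.2"))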
diff --git a/docs/exts/operators_and_hooks_ref.py b/docs/exts/operators_and_hooks_ref.py
index 90dff7de49..a14ff0ad0f 100644
--- a/docs/exts/operators_and_hooks_ref.py
+++ b/docs/exts/operators_and_hooks_ref.py
@@ -34,7 +34,7 @@ from sphinx.util.docutils import switch_source_input
CMD_OPERATORS_AND_HOOKS = "operators-and-hooks"
-CMD_TRANSFERS = 'transfers'
+CMD_TRANSFERS = "transfers"
"""
Directives for rendering tables with operators.
@@ -47,7 +47,7 @@ DEFAULT_HEADER_SEPARATOR = "="
CURRENT_DIR = os.path.dirname(__file__)
ROOT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir))
-DOCS_DIR = os.path.join(ROOT_DIR, 'docs')
+DOCS_DIR = os.path.join(ROOT_DIR, "docs")
@lru_cache(maxsize=None)
@@ -72,7 +72,7 @@ def _docs_path(filepath: str):
_, _, provider, rest = filepath.split("/", maxsplit=3)
filepath = f"{provider}:{rest}"
else:
- filepath = os.path.join(ROOT_DIR, filepath.lstrip('/'))
+ filepath = os.path.join(ROOT_DIR, filepath.lstrip("/"))
filepath = os.path.relpath(filepath, DOCS_DIR)
len_rst = len(".rst")
@@ -82,7 +82,7 @@ def _docs_path(filepath: str):
def _prepare_resource_index(package_data, resource_type):
return {
- integration["integration-name"]: {**integration, 'package-name': provider['package-name']}
+ integration["integration-name"]: {**integration, "package-name": provider["package-name"]}
for provider in package_data
for integration in provider.get(resource_type, [])
}
@@ -107,18 +107,18 @@ def _prepare_operators_data(tags: set[str] | None):
item = {
"integration": integration,
}
- operators = all_operators_by_integration.get(integration['integration-name'])
- sensors = all_sensors_by_integration.get(integration['integration-name'])
- hooks = all_hooks_by_integration.get(integration['integration-name'])
+ operators = all_operators_by_integration.get(integration["integration-name"])
+ sensors = all_sensors_by_integration.get(integration["integration-name"])
+ hooks = all_hooks_by_integration.get(integration["integration-name"])
- if 'how-to-guide' in item['integration']:
- item['integration']['how-to-guide'] = [_docs_path(d) for d in item['integration']['how-to-guide']]
+ if "how-to-guide" in item["integration"]:
+ item["integration"]["how-to-guide"] = [_docs_path(d) for d in item["integration"]["how-to-guide"]]
if operators:
- item['operators'] = operators
+ item["operators"] = operators
if sensors:
- item['sensors'] = sensors
+ item["sensors"] = sensors
if hooks:
- item['hooks'] = hooks
+ item["hooks"] = hooks
if operators or sensors or hooks:
results.append(item)
@@ -142,9 +142,9 @@ def _prepare_transfer_data(tags: set[str] | None):
all_transfers = [
{
**transfer,
- 'package-name': provider['package-name'],
- 'source-integration': all_operators_by_integration[transfer['source-integration-name']],
- 'target-integration': all_operators_by_integration[transfer['target-integration-name']],
+ "package-name": provider["package-name"],
+ "source-integration": all_operators_by_integration[transfer["source-integration-name"]],
+ "target-integration": all_operators_by_integration[transfer["target-integration-name"]],
}
for provider in package_data
for transfer in provider.get("transfers", [])
@@ -155,14 +155,14 @@ def _prepare_transfer_data(tags: set[str] | None):
to_display_transfers = [
transfer
for transfer in all_transfers
- if tags.intersection(transfer['source-integration'].get('tags', set()))
- or tags.intersection(transfer['target-integration'].get('tags', set()))
+ if tags.intersection(transfer["source-integration"].get("tags", set()))
+ or tags.intersection(transfer["target-integration"].get("tags", set()))
]
for transfer in to_display_transfers:
- if 'how-to-guide' not in transfer:
+ if "how-to-guide" not in transfer:
continue
- transfer['how-to-guide'] = _docs_path(transfer['how-to-guide'])
+ transfer["how-to-guide"] = _docs_path(transfer["how-to-guide"])
return to_display_transfers
@@ -180,8 +180,8 @@ def _prepare_logging_data():
for provider in package_data:
logging_handlers = provider.get("logging")
if logging_handlers:
- package_name = provider['package-name']
- all_logging[package_name] = {'name': provider['name'], 'handlers': logging_handlers}
+ package_name = provider["package-name"]
+ all_logging[package_name] = {"name": provider["name"], "handlers": logging_handlers}
return all_logging
@@ -197,8 +197,8 @@ def _prepare_auth_backend_data():
for provider in package_data:
auth_backends_list = provider.get("auth-backends")
if auth_backends_list:
- package_name = provider['package-name']
- all_auth_backends[package_name] = {'name': provider['name'], 'auth_backends': auth_backends_list}
+ package_name = provider["package-name"]
+ all_auth_backends[package_name] = {"name": provider["name"], "auth_backends": auth_backends_list}
return all_auth_backends
@@ -214,10 +214,10 @@ def _prepare_secrets_backend_data():
for provider in package_data:
secret_backends_list = provider.get("secrets-backends")
if secret_backends_list:
- package_name = provider['package-name']
+ package_name = provider["package-name"]
all_secret_backends[package_name] = {
- 'name': provider['name'],
- 'secrets_backends': secret_backends_list,
+ "name": provider["name"],
+ "secrets_backends": secret_backends_list,
}
return all_secret_backends
@@ -236,10 +236,10 @@ def _prepare_connections_data():
for provider in package_data:
connections_list = provider.get("connection-types")
if connections_list:
- package_name = provider['package-name']
+ package_name = provider["package-name"]
all_connections[package_name] = {
- 'name': provider['name'],
- 'connection_types': connections_list,
+ "name": provider["name"],
+ "connection_types": connections_list,
}
return all_connections
@@ -256,10 +256,10 @@ def _prepare_extra_links_data():
for provider in package_data:
extra_link_list = provider.get("extra-links")
if extra_link_list:
- package_name = provider['package-name']
+ package_name = provider["package-name"]
all_extra_links[package_name] = {
- 'name': provider['name'],
- 'extra_links': extra_link_list,
+ "name": provider["name"],
+ "extra_links": extra_link_list,
}
return all_extra_links
@@ -274,17 +274,17 @@ class BaseJinjaReferenceDirective(Directive):
"""The base directive for OperatorsHooksReferenceDirective and TransfersReferenceDirective"""
optional_arguments = 1
- option_spec = {"tags": directives.unchanged, 'header-separator': directives.unchanged_required}
+ option_spec = {"tags": directives.unchanged, "header-separator": directives.unchanged_required}
def run(self):
tags_arg = self.options.get("tags")
tags = {t.strip() for t in tags_arg.split(",")} if tags_arg else None
- header_separator = self.options.get('header-separator')
+ header_separator = self.options.get("header-separator")
new_content = self.render_content(tags=tags, header_separator=header_separator)
with switch_source_input(self.state, self.content):
- new_content = StringList(new_content.splitlines(), source='')
+ new_content = StringList(new_content.splitlines(), source="")
node = nodes.section() # type: Element
# necessary so that the child nodes get the right source/line set
node.document = self.state.document
@@ -369,29 +369,29 @@ class ExtraLinksDirective(BaseJinjaReferenceDirective):
def setup(app):
"""Setup plugin"""
- app.add_directive('operators-hooks-ref', OperatorsHooksReferenceDirective)
- app.add_directive('transfers-ref', TransfersReferenceDirective)
- app.add_directive('airflow-logging', LoggingDirective)
- app.add_directive('airflow-auth-backends', AuthBackendDirective)
- app.add_directive('airflow-secrets-backends', SecretsBackendDirective)
- app.add_directive('airflow-connections', ConnectionsDirective)
- app.add_directive('airflow-extra-links', ExtraLinksDirective)
+ app.add_directive("operators-hooks-ref", OperatorsHooksReferenceDirective)
+ app.add_directive("transfers-ref", TransfersReferenceDirective)
+ app.add_directive("airflow-logging", LoggingDirective)
+ app.add_directive("airflow-auth-backends", AuthBackendDirective)
+ app.add_directive("airflow-secrets-backends", SecretsBackendDirective)
+ app.add_directive("airflow-connections", ConnectionsDirective)
+ app.add_directive("airflow-extra-links", ExtraLinksDirective)
- return {'parallel_read_safe': True, 'parallel_write_safe': True}
+ return {"parallel_read_safe": True, "parallel_write_safe": True}
option_tag = click.option(
- '--tag',
+ "--tag",
multiple=True,
help="If passed, displays integrations that have a matching tag",
)
option_header_separator = click.option(
- '--header-separator', default=DEFAULT_HEADER_SEPARATOR, show_default=True
+ "--header-separator", default=DEFAULT_HEADER_SEPARATOR, show_default=True
)
-@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500})
+@click.group(context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 500})
def cli():
"""Render tables with integrations"""
diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py
index e4f7e52b90..97f9a7f100 100644
--- a/docs/exts/provider_yaml_utils.py
+++ b/docs/exts/provider_yaml_utils.py
@@ -70,8 +70,8 @@ def load_package_data() -> list[dict[str, Any]]:
except jsonschema.ValidationError:
raise Exception(f"Unable to parse: {provider_yaml_path}.")
provider_yaml_dir = os.path.dirname(provider_yaml_path)
- provider['python-module'] = _filepath_to_module(provider_yaml_dir)
- provider['package-dir'] = provider_yaml_dir
- provider['system-tests-dir'] = _filepath_to_system_tests(provider_yaml_dir)
+ provider["python-module"] = _filepath_to_module(provider_yaml_dir)
+ provider["package-dir"] = provider_yaml_dir
+ provider["system-tests-dir"] = _filepath_to_system_tests(provider_yaml_dir)
result.append(provider)
return result
diff --git a/docs/exts/providers_packages_ref.py b/docs/exts/providers_packages_ref.py
index 6f7d49bd44..1f5a3ac1e5 100644
--- a/docs/exts/providers_packages_ref.py
+++ b/docs/exts/providers_packages_ref.py
@@ -22,19 +22,19 @@ from sphinx.application import Sphinx
def _on_config_inited(app, config):
del app
- jinja_context = getattr(config, 'jinja_contexts', None) or {}
+ jinja_context = getattr(config, "jinja_contexts", None) or {}
- jinja_context['providers_ctx'] = {'providers': load_package_data()}
+ jinja_context["providers_ctx"] = {"providers": load_package_data()}
config.jinja_contexts = jinja_context
def setup(app: Sphinx):
"""Setup plugin"""
- app.setup_extension('sphinx_jinja')
+ app.setup_extension("sphinx_jinja")
app.connect("config-inited", _on_config_inited)
app.add_crossref_type(
directivename="provider",
rolename="provider",
)
- return {'parallel_read_safe': True, 'parallel_write_safe': True}
+ return {"parallel_read_safe": True, "parallel_write_safe": True}
diff --git a/docs/exts/redirects.py b/docs/exts/redirects.py
index ee438fbdb0..7b19ad5b03 100644
--- a/docs/exts/redirects.py
+++ b/docs/exts/redirects.py
@@ -53,7 +53,7 @@ def generate_redirects(app):
log.debug("Redirecting '%s' to '%s'", from_path, to_path)
- from_path = from_path.replace(in_suffix, '.html')
+ from_path = from_path.replace(in_suffix, ".html")
to_path = to_path.replace(in_suffix, ".html")
to_path_prefix = f"..{os.path.sep}" * (len(from_path.split(os.path.sep)) - 1)
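The prefix computation turns the source page's directory depth into the right number of ../ segments so the generated redirect stays relative. A worked example on a POSIX path:

    import os

    from_path = "howto/operator/old-page.html"  # two directories below the docs root
    to_path_prefix = f"..{os.path.sep}" * (len(from_path.split(os.path.sep)) - 1)
    print(to_path_prefix)  # "../../" -- enough to climb back to the docs root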
diff --git a/docs/exts/sphinx_script_update.py b/docs/exts/sphinx_script_update.py
index 393159e71a..990508d6c6 100644
--- a/docs/exts/sphinx_script_update.py
+++ b/docs/exts/sphinx_script_update.py
@@ -47,10 +47,10 @@ def _user_cache_dir(appname=None):
# Windows has a complex procedure for locating the application data directory because its
# location can be changed in the Windows registry, so we use a temporary directory for the cache
path = os.path.join(tempfile.gettempdir(), appname)
- elif sys.platform == 'darwin':
- path = os.path.expanduser('~/Library/Caches')
+ elif sys.platform == "darwin":
+ path = os.path.expanduser("~/Library/Caches")
else:
- path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+ path = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
path = os.path.join(path, appname)
return path
@@ -88,10 +88,10 @@ def fetch_and_cache(script_url: str, output_filename: str):
output_file.write(res.content)
# Save cache metadata, if needed
- etag = res.headers.get('etag', None)
+ etag = res.headers.get("etag", None)
if etag:
cache_metadata[cache_key] = etag
- with open(cache_metadata_filepath, 'w') as cache_file:
+ with open(cache_metadata_filepath, "w") as cache_file:
json.dump(cache_metadata, cache_file)
return cache_filepath
@@ -113,7 +113,7 @@ def build_finished(app, exception):
output_filename = "script.js"
cache_filepath = fetch_and_cache(script_url, output_filename)
- _copy_file(cache_filepath, os.path.join(app.builder.outdir, '_static', "redoc.js"))
+ _copy_file(cache_filepath, os.path.join(app.builder.outdir, "_static", "redoc.js"))
def setup(app):
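The cache metadata stores the ETag the server returned for each script URL, which allows a later build to re-fetch conditionally so an unchanged file costs only a 304 response. A sketch of that conditional request, assuming the requests library (the conditional logic itself is not shown in this hunk):

    import requests

    # Hypothetical values; the real ones come from the cache metadata JSON.
    script_url = "https://example.org/redoc.js"
    cached_etag = '"abc123"'

    res = requests.get(script_url, headers={"If-None-Match": cached_etag})
    if res.status_code == 304:
        print("Cached copy is still fresh; nothing to download.")
    else:
        print("Server sent a new version:", res.headers.get("etag"))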
diff --git a/docs/exts/substitution_extensions.py b/docs/exts/substitution_extensions.py
index c65573f54b..c0f2363650 100644
--- a/docs/exts/substitution_extensions.py
+++ b/docs/exts/substitution_extensions.py
@@ -31,9 +31,9 @@ from sphinx.transforms.post_transforms.code import HighlightLanguageTransform
LOGGER = logging.getLogger(__name__)
-OriginalCodeBlock: Directive = directives._directives['code-block']
+OriginalCodeBlock: Directive = directives._directives["code-block"]
-_SUBSTITUTION_OPTION_NAME = 'substitutions'
+_SUBSTITUTION_OPTION_NAME = "substitutions"
class SubstitutionCodeBlock(OriginalCodeBlock): # type: ignore
@@ -47,7 +47,7 @@ class SubstitutionCodeBlock(OriginalCodeBlock): # type: ignore
[node] = super().run()
if _SUBSTITUTION_OPTION_NAME in self.options:
- node.attributes['substitutions'] = True
+ node.attributes["substitutions"] = True
return [node]
@@ -77,7 +77,7 @@ class SubstitutionCodeBlockTransform(SphinxTransform):
old_child = child
for name, value in substitution_defs.items():
replacement = value.astext()
- child = nodes.Text(child.replace(f'|{name}|', replacement))
+ child = nodes.Text(child.replace(f"|{name}|", replacement))
node.replace(old_child, child)
# The highlighter checks this -- without this, it will refuse to apply highlighting
@@ -93,8 +93,8 @@ def substitution_code_role(*args, **kwargs) -> tuple[list[Node], list[system_mes
substitution_code_role.options = { # type: ignore
- 'class': directives.class_option,
- 'language': directives.unchanged,
+ "class": directives.class_option,
+ "language": directives.unchanged,
}
@@ -112,17 +112,17 @@ class AddSpacepadSubstReference(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
substitution_defs = self.document.substitution_defs
- version = substitution_defs['version'].astext()
+ version = substitution_defs["version"].astext()
pad = " " * len(version)
- substitution_defs['version-spacepad'] = nodes.substitution_definition(version, pad)
+ substitution_defs["version-spacepad"] = nodes.substitution_definition(version, pad)
...
def setup(app: Sphinx) -> dict:
"""Setup plugin"""
- app.add_config_value('substitutions', [], 'html')
- directives.register_directive('code-block', SubstitutionCodeBlock)
- app.add_role('subst-code', substitution_code_role)
+ app.add_config_value("substitutions", [], "html")
+ directives.register_directive("code-block", SubstitutionCodeBlock)
+ app.add_role("subst-code", substitution_code_role)
app.add_post_transform(SubstitutionCodeBlockTransform)
app.add_post_transform(AddSpacepadSubstReference)
- return {'parallel_write_safe': True}
+ return {"parallel_write_safe": True}
diff --git a/docs/publish_docs.py b/docs/publish_docs.py
index fea12a7ab3..6666b596af 100755
--- a/docs/publish_docs.py
+++ b/docs/publish_docs.py
@@ -25,7 +25,7 @@ from docs.exts.docs_build.docs_builder import AirflowDocsBuilder
from docs.exts.docs_build.package_filter import process_package_filters
from docs.exts.provider_yaml_utils import load_package_data
-AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY')
+AIRFLOW_SITE_DIR = os.environ.get("AIRFLOW_SITE_DIRECTORY")
if __name__ != "__main__":
@@ -37,12 +37,12 @@ if __name__ != "__main__":
if not (
AIRFLOW_SITE_DIR
and os.path.isdir(AIRFLOW_SITE_DIR)
- and os.path.isdir(os.path.join(AIRFLOW_SITE_DIR, 'docs-archive'))
+ and os.path.isdir(os.path.join(AIRFLOW_SITE_DIR, "docs-archive"))
):
raise SystemExit(
- 'Before using this script, set the environment variable AIRFLOW_SITE_DIRECTORY. This variable '
- 'should contain the path to the airflow-site repository directory. '
- '${AIRFLOW_SITE_DIRECTORY}/docs-archive must exist.'
+ "Before using this script, set the environment variable AIRFLOW_SITE_DIRECTORY. This variable "
+ "should contain the path to the airflow-site repository directory. "
+ "${AIRFLOW_SITE_DIRECTORY}/docs-archive must exists."
)
ALL_PROVIDER_YAMLS = load_package_data()
@@ -50,7 +50,7 @@ ALL_PROVIDER_YAMLS = load_package_data()
def get_available_packages():
"""Get list of all available packages to build."""
- provider_package_names = [provider['package-name'] for provider in ALL_PROVIDER_YAMLS]
+ provider_package_names = [provider["package-name"] for provider in ALL_PROVIDER_YAMLS]
return [
"apache-airflow",
"docker-stack",
@@ -63,18 +63,18 @@ def get_available_packages():
def _get_parser():
available_packages_list = " * " + "\n * ".join(get_available_packages())
parser = argparse.ArgumentParser(
- description='Copies the built documentation to airflow-site repository.',
+ description="Copies the built documentation to airflow-site repository.",
epilog=f"List of supported documentation packages:\n{available_packages_list}",
)
parser.formatter_class = argparse.RawTextHelpFormatter
parser.add_argument(
- '--disable-checks', dest='disable_checks', action='store_true', help='Disables extra checks'
+ "--disable-checks", dest="disable_checks", action="store_true", help="Disables extra checks"
)
parser.add_argument(
- '--override-versioned',
- dest='override_versioned',
- action='store_true',
- help='Overrides versioned directories',
+ "--override-versioned",
+ dest="override_versioned",
+ action="store_true",
+ help="Overrides versioned directories",
)
parser.add_argument(
"--package-filter",
diff --git a/docs/rtd-deprecation/conf.py b/docs/rtd-deprecation/conf.py
index d38d1f385f..341c96b235 100644
--- a/docs/rtd-deprecation/conf.py
+++ b/docs/rtd-deprecation/conf.py
@@ -16,6 +16,6 @@
# under the License.
from __future__ import annotations
-project = 'Apache Airflow'
+project = "Apache Airflow"
html_extra_path = ["404.html"]
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 2e67be35b9..106fa15fea 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -44,6 +44,7 @@ analyzeSyntax
AnalyzeSyntaxResponse
Aneesh
AnnotateTextResponse
+ans
Ansible
apache
api
diff --git a/kubernetes_tests/test_base.py b/kubernetes_tests/test_base.py
index 71ccab9d26..a5a690881d 100644
--- a/kubernetes_tests/test_base.py
+++ b/kubernetes_tests/test_base.py
@@ -31,8 +31,8 @@ import requests.exceptions
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
-CLUSTER_FORWARDED_PORT = os.environ.get('CLUSTER_FORWARDED_PORT') or "8080"
-KUBERNETES_HOST_PORT = (os.environ.get('CLUSTER_HOST') or "localhost") + ":" + CLUSTER_FORWARDED_PORT
+CLUSTER_FORWARDED_PORT = os.environ.get("CLUSTER_FORWARDED_PORT") or "8080"
+KUBERNETES_HOST_PORT = (os.environ.get("CLUSTER_HOST") or "localhost") + ":" + CLUSTER_FORWARDED_PORT
EXECUTOR = os.environ.get("EXECUTOR")
print()
@@ -43,14 +43,14 @@ print()
class TestBase(unittest.TestCase):
def _describe_resources(self, namespace: str):
- kubeconfig_basename = os.path.basename(os.environ.get('KUBECONFIG', "default"))
+ kubeconfig_basename = os.path.basename(os.environ.get("KUBECONFIG", "default"))
output_file_path = (
Path(tempfile.gettempdir())
/ f"k8s_test_resources_{namespace}_{kubeconfig_basename}_{self.id()}.txt"
)
print(f"Dumping resources to {output_file_path}")
- ci = os.environ.get('CI')
- if ci and ci.lower() == 'true':
+ ci = os.environ.get("CI")
+ if ci and ci.lower() == "true":
print("The resource dump will be uploaded as artifact of the CI job")
with open(output_file_path, "wt") as output_file:
print("=" * 80, file=output_file)
@@ -84,26 +84,26 @@ class TestBase(unittest.TestCase):
@staticmethod
def _num_pods_in_namespace(namespace):
- air_pod = check_output(['kubectl', 'get', 'pods', '-n', namespace]).decode()
- air_pod = air_pod.split('\n')
- names = [re.compile(r'\s+').split(x)[0] for x in air_pod if 'airflow' in x]
+ air_pod = check_output(["kubectl", "get", "pods", "-n", namespace]).decode()
+ air_pod = air_pod.split("\n")
+ names = [re.compile(r"\s+").split(x)[0] for x in air_pod if "airflow" in x]
return len(names)
@staticmethod
- def _delete_airflow_pod(name=''):
- suffix = '-' + name if name else ''
- air_pod = check_output(['kubectl', 'get', 'pods']).decode()
- air_pod = air_pod.split('\n')
- names = [re.compile(r'\s+').split(x)[0] for x in air_pod if 'airflow' + suffix in x]
+ def _delete_airflow_pod(name=""):
+ suffix = "-" + name if name else ""
+ air_pod = check_output(["kubectl", "get", "pods"]).decode()
+ air_pod = air_pod.split("\n")
+ names = [re.compile(r"\s+").split(x)[0] for x in air_pod if "airflow" + suffix in x]
if names:
- check_call(['kubectl', 'delete', 'pod', names[0]])
+ check_call(["kubectl", "delete", "pod", names[0]])
def _get_session_with_retries(self):
session = requests.Session()
- session.auth = ('admin', 'admin')
+ session.auth = ("admin", "admin")
retries = Retry(total=3, backoff_factor=1)
- session.mount('http://', HTTPAdapter(max_retries=retries))
- session.mount('https://', HTTPAdapter(max_retries=retries))
+ session.mount("http://", HTTPAdapter(max_retries=retries))
+ session.mount("https://", HTTPAdapter(max_retries=retries))
return session
def _ensure_airflow_webserver_is_healthy(self):
@@ -138,7 +138,7 @@ class TestBase(unittest.TestCase):
def monitor_task(self, host, dag_run_id, dag_id, task_id, expected_final_state, timeout):
tries = 0
- state = ''
+ state = ""
max_tries = max(int(timeout / 5), 1)
# Wait some time for the operator to complete
while tries < max_tries:
@@ -146,7 +146,7 @@ class TestBase(unittest.TestCase):
# Check task state
try:
get_string = (
- f'http://{host}/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}'
... 145034 lines suppressed ...