Posted to commits@airflow.apache.org by po...@apache.org on 2021/04/29 07:09:02 UTC

[airflow] branch master updated: Update pre-commit checks (#15583)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new 814e471  Update pre-commit checks (#15583)
814e471 is described below

commit 814e471d137aad68bd64a21d20736e7b88403f97
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Thu Apr 29 09:08:38 2021 +0200

    Update pre-commit checks (#15583)
---
 .pre-commit-config.yaml                            | 12 ++---
 .../example_dags/example_task_group_decorator.py   | 12 ++---
 .../aws/example_dags/example_s3_to_redshift.py     | 12 +++--
 .../apache/beam/example_dags/example_beam.py       | 12 +++--
 .../dingding/example_dags/example_dingding.py      | 25 +++++++----
 .../example_dags/example_bigquery_operations.py    | 19 ++++++--
 .../snowflake/example_dags/example_snowflake.py    | 16 ++++---
 airflow/utils/file.py                              |  2 +-
 dev/airflow-license                                |  4 +-
 dev/provider_packages/prepare_provider_packages.py |  2 +-
 tests/cli/commands/test_pool_command.py            |  2 +-
 tests/cli/commands/test_user_command.py            |  2 +-
 tests/dags/test_task_view_type_check.py            |  4 +-
 tests/jobs/test_scheduler_job.py                   |  4 +-
 tests/operators/test_sql.py                        | 22 ++++-----
 tests/operators/test_weekday.py                    |  2 +-
 tests/providers/jenkins/hooks/test_jenkins.py      |  2 +-
 tests/providers/slack/hooks/test_slack.py          | 12 ++---
 tests/providers/yandex/hooks/test_yandex.py        |  2 +-
 tests/utils/test_task_group.py                     | 52 +++++++++++-----------
 20 files changed, 125 insertions(+), 95 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b48f1e7..b68511b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -26,7 +26,7 @@ repos:
       - id: identity
       - id: check-hooks-apply
   - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.1.9
+    rev: v1.1.10
     hooks:
       - id: forbid-tabs
         exclude: ^docs/Makefile$|^clients/gen/go.sh|\.gitmodules$
@@ -151,7 +151,7 @@ repos:
           - "--maxlevel"
           - "2"
   - repo: https://github.com/psf/black
-    rev: 20.8b1
+    rev: 21.4b2
     hooks:
       - id: black
         args: [--config=./pyproject.toml]
@@ -171,7 +171,7 @@ repos:
         args:
           - --remove
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.10.0
+    rev: v2.13.0
     hooks:
       - id: pyupgrade
         args: ["--py36-plus"]
@@ -181,7 +181,7 @@ repos:
       - id: rst-backticks
       - id: python-no-log-warn
   - repo: https://github.com/adrienverge/yamllint
-    rev: v1.26.0
+    rev: v1.26.1
     hooks:
       - id: yamllint
         name: Check yaml files with yamllint
@@ -190,7 +190,7 @@ repos:
         exclude:
           ^.*init_git_sync\.template\.yaml$|^.*airflow\.template\.yaml$|^chart/(?:templates|files)/.*\.yaml
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.6.4
+    rev: 5.8.0
     hooks:
       - id: isort
         name: Run isort to sort imports
@@ -198,7 +198,7 @@ repos:
         # To keep consistent with the global isort skip config defined in setup.cfg
         exclude: ^build/.*$|^.tox/.*$|^venv/.*$
   - repo: https://github.com/pycqa/pydocstyle
-    rev: 5.1.1
+    rev: 6.0.0
     hooks:
       - id: pydocstyle
         name: Run pydocstyle
diff --git a/airflow/example_dags/example_task_group_decorator.py b/airflow/example_dags/example_task_group_decorator.py
index 39ee662..9705630 100644
--- a/airflow/example_dags/example_task_group_decorator.py
+++ b/airflow/example_dags/example_task_group_decorator.py
@@ -27,38 +27,38 @@ from airflow.utils.dates import days_ago
 # Creating Tasks
 @task
 def task_start():
-    """Dummy Task which is First Task of Dag """
+    """Dummy Task which is First Task of Dag"""
     return '[Task_start]'
 
 
 @task
 def task_1(value):
-    """ Dummy Task1"""
+    """Dummy Task1"""
     return f'[ Task1 {value} ]'
 
 
 @task
 def task_2(value):
-    """ Dummy Task2"""
+    """Dummy Task2"""
     return f'[ Task2 {value} ]'
 
 
 @task
 def task_3(value):
-    """ Dummy Task3"""
+    """Dummy Task3"""
     print(f'[ Task3 {value} ]')
 
 
 @task
 def task_end():
-    """ Dummy Task which is Last Task of Dag """
+    """Dummy Task which is Last Task of Dag"""
     print('[ Task_End  ]')
 
 
 # Creating TaskGroups
 @task_group
 def task_group_function(value):
-    """ TaskGroup for grouping related Tasks"""
+    """TaskGroup for grouping related Tasks"""
     return task_3(task_2(task_1(value)))
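
The docstring edits in this file, and the many similar hunks in the tests
further down, follow from the black bump to 21.4b2 shown above: newer black
strips stray leading and trailing spaces inside one-line docstrings. A
minimal, runnable sketch of the effect (the function names here are
illustrative, not from the commit):

    def before():
        """ Dummy Task1"""  # stray leading space, tolerated by black 20.8b1

    def after():
        """Dummy Task1"""  # what black 21.4b2 rewrites it to

    print(repr(before.__doc__))  # ' Dummy Task1'
    print(repr(after.__doc__))   # 'Dummy Task1'
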
 
 
diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
index 76c79e5..1a7c911 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
@@ -75,7 +75,11 @@ with DAG(
     teardown__task_remove_sample_data_from_s3 = PythonOperator(
         python_callable=_remove_sample_data_from_s3, task_id='teardown__remove_sample_data_from_s3'
     )
-    [setup__task_add_sample_data_to_s3, setup__task_create_table] >> task_transfer_s3_to_redshift >> [
-        teardown__task_drop_table,
-        teardown__task_remove_sample_data_from_s3,
-    ]
+    (
+        [setup__task_add_sample_data_to_s3, setup__task_create_table]
+        >> task_transfer_s3_to_redshift
+        >> [
+            teardown__task_drop_table,
+            teardown__task_remove_sample_data_from_s3,
+        ]
+    )
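
Wrapping the dependency chain in parentheses lets the >> expression span
several lines without backslashes, since Python continues lines freely inside
brackets; the same rewrite recurs in the provider examples below, presumably
to suit the updated formatting pass. A self-contained sketch of the chaining
pattern itself, using a plain stand-in class rather than Airflow operators:

    class Node:
        """Stand-in for an operator; only the edge bookkeeping matters."""

        def __init__(self, name):
            self.name = name
            self.downstream = []

        def __rshift__(self, other):
            # node >> node_or_list: record the edge(s) and return the right
            # side so the chain can continue
            targets = other if isinstance(other, list) else [other]
            self.downstream.extend(targets)
            return other

        def __rrshift__(self, other):
            # [node, node] >> self: lists define no matching __rshift__, so
            # Python falls back to the right operand's __rrshift__
            for node in other:
                node.downstream.append(self)
            return self

    a, b, c, d = (Node(n) for n in "abcd")
    (
        [a, b]
        >> c
        >> d
    )
    print(a.downstream[0].name, c.downstream[0].name)  # prints: c d
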
diff --git a/airflow/providers/apache/beam/example_dags/example_beam.py b/airflow/providers/apache/beam/example_dags/example_beam.py
index d20c4ce..a95ec49 100644
--- a/airflow/providers/apache/beam/example_dags/example_beam.py
+++ b/airflow/providers/apache/beam/example_dags/example_beam.py
@@ -268,10 +268,14 @@ with models.DAG(
         py_system_site_packages=False,
     )
 
-    [
-        start_python_pipeline_local_direct_runner,
-        start_python_pipeline_direct_runner,
-    ] >> start_python_pipeline_local_flink_runner >> start_python_pipeline_local_spark_runner
+    (
+        [
+            start_python_pipeline_local_direct_runner,
+            start_python_pipeline_direct_runner,
+        ]
+        >> start_python_pipeline_local_flink_runner
+        >> start_python_pipeline_local_spark_runner
+    )
 
 
 with models.DAG(
diff --git a/airflow/providers/dingding/example_dags/example_dingding.py b/airflow/providers/dingding/example_dags/example_dingding.py
index 8f96bdd..70141d6 100644
--- a/airflow/providers/dingding/example_dags/example_dingding.py
+++ b/airflow/providers/dingding/example_dags/example_dingding.py
@@ -205,12 +205,19 @@ with DAG(
         message="",
     )
 
-    [
-        text_msg_remind_none,
-        text_msg_remind_specific,
-        text_msg_remind_include_invalid,
-        text_msg_remind_all,
-    ] >> link_msg >> markdown_msg >> [
-        single_action_card_msg,
-        multi_action_card_msg,
-    ] >> feed_card_msg >> msg_failure_callback
+    (
+        [
+            text_msg_remind_none,
+            text_msg_remind_specific,
+            text_msg_remind_include_invalid,
+            text_msg_remind_all,
+        ]
+        >> link_msg
+        >> markdown_msg
+        >> [
+            single_action_card_msg,
+            multi_action_card_msg,
+        ]
+        >> feed_card_msg
+        >> msg_failure_callback
+    )
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py
index 96aa9a4..35ee4d8 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py
@@ -205,10 +205,21 @@ with models.DAG(
 
     create_dataset >> patch_dataset >> update_dataset >> get_dataset >> get_dataset_result >> delete_dataset
 
-    update_dataset >> create_table >> create_view >> create_materialized_view >> update_table >> [
-        get_dataset_tables,
-        delete_view,
-    ] >> upsert_table >> delete_materialized_view >> delete_table >> delete_dataset
+    (
+        update_dataset
+        >> create_table
+        >> create_view
+        >> create_materialized_view
+        >> update_table
+        >> [
+            get_dataset_tables,
+            delete_view,
+        ]
+        >> upsert_table
+        >> delete_materialized_view
+        >> delete_table
+        >> delete_dataset
+    )
     update_dataset >> create_external_table >> delete_dataset
 
 with models.DAG(
diff --git a/airflow/providers/snowflake/example_dags/example_snowflake.py b/airflow/providers/snowflake/example_dags/example_snowflake.py
index c19a833..643becf 100644
--- a/airflow/providers/snowflake/example_dags/example_snowflake.py
+++ b/airflow/providers/snowflake/example_dags/example_snowflake.py
@@ -123,9 +123,13 @@ slack_report = SnowflakeToSlackOperator(
 
 # [END howto_operator_snowflake_to_slack]
 
-snowflake_op_sql_str >> [
-    snowflake_op_with_params,
-    snowflake_op_sql_list,
-    snowflake_op_template_file,
-    copy_into_table,
-] >> slack_report
+(
+    snowflake_op_sql_str
+    >> [
+        snowflake_op_with_params,
+        snowflake_op_sql_list,
+        snowflake_op_template_file,
+        copy_into_table,
+    ]
+    >> slack_report
+)
diff --git a/airflow/utils/file.py b/airflow/utils/file.py
index c5dca17..b008612 100644
--- a/airflow/utils/file.py
+++ b/airflow/utils/file.py
@@ -65,7 +65,7 @@ def mkdirs(path, mode):
     Path(path).mkdir(mode=mode, parents=True, exist_ok=True)
 
 
-ZIP_REGEX = re.compile(r'((.*\.zip){})?(.*)'.format(re.escape(os.sep)))
+ZIP_REGEX = re.compile(fr'((.*\.zip){re.escape(os.sep)})?(.*)')
 
 
 def correct_maybe_zipped(fileloc):
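
The ZIP_REGEX change is pyupgrade converting str.format() to an f-string; the
fr prefix keeps the pattern raw while still interpolating the escaped path
separator. A quick check of what the rewritten pattern captures, assuming a
POSIX os.sep of '/':

    import os
    import re

    ZIP_REGEX = re.compile(fr'((.*\.zip){re.escape(os.sep)})?(.*)')

    m = ZIP_REGEX.match('/dags/archive.zip/inner_dag.py')
    print(m.group(2))  # '/dags/archive.zip' -- the containing archive
    print(m.group(3))  # 'inner_dag.py'      -- the path inside it

    m = ZIP_REGEX.match('/dags/plain_dag.py')
    print(m.group(2))  # None -- no archive in the path
    print(m.group(3))  # '/dags/plain_dag.py'
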
diff --git a/dev/airflow-license b/dev/airflow-license
index 2aac7e5..80ddfbf 100755
--- a/dev/airflow-license
+++ b/dev/airflow-license
@@ -75,7 +75,7 @@ if __name__ == "__main__":
     for notice in notices:
         notice = notice[0]
         license = parse_license_file(notice[1])
-        print("{:<30}|{:<50}||{:<20}||{:<10}".format(notice[1], notice[2][:50], notice[0], license))
+        print(f"{notice[1]:<30}|{notice[2][:50]:<50}||{notice[0]:<20}||{license:<10}")
 
     file_count = len([name for name in os.listdir("../licenses")])
-    print("Defined licenses: {} Files found: {}".format(len(notices), file_count))
+    print(f"Defined licenses: {len(notices)} Files found: {file_count}")
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index bd1cbfe..8e99cab 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -323,7 +323,7 @@ def get_long_description(provider_package_id: str) -> str:
     readme_file = os.path.join(package_folder, "README.md")
     if not os.path.exists(readme_file):
         return ""
-    with open(readme_file, encoding='utf-8', mode="r") as file:
+    with open(readme_file, encoding='utf-8') as file:
         readme_contents = file.read()
     copying = True
     long_description = ""
diff --git a/tests/cli/commands/test_pool_command.py b/tests/cli/commands/test_pool_command.py
index 92fb46d..ab06c35 100644
--- a/tests/cli/commands/test_pool_command.py
+++ b/tests/cli/commands/test_pool_command.py
@@ -114,7 +114,7 @@ class TestCliPools(unittest.TestCase):
         # Export json
         pool_command.pool_export(self.parser.parse_args(['pools', 'export', 'pools_export.json']))
 
-        with open('pools_export.json', mode='r') as file:
+        with open('pools_export.json') as file:
             pool_config_output = json.load(file)
             assert pool_config_input == pool_config_output, "Input and output pool files are not same"
         os.remove('pools_import.json')
diff --git a/tests/cli/commands/test_user_command.py b/tests/cli/commands/test_user_command.py
index 9e94045..221d6d9 100644
--- a/tests/cli/commands/test_user_command.py
+++ b/tests/cli/commands/test_user_command.py
@@ -230,7 +230,7 @@ class TestCliUsers:
         self._import_users_from_file([user1, user2])
 
         users_filename = self._export_users_to_file()
-        with open(users_filename, mode='r') as file:
+        with open(users_filename) as file:
             retrieved_users = json.loads(file.read())
         os.remove(users_filename)
 
diff --git a/tests/dags/test_task_view_type_check.py b/tests/dags/test_task_view_type_check.py
index 5003cad..5e6bb01 100644
--- a/tests/dags/test_task_view_type_check.py
+++ b/tests/dags/test_task_view_type_check.py
@@ -37,11 +37,11 @@ class CallableClass:
     """
 
     def __call__(self):
-        """A __call__ method """
+        """A __call__ method"""
 
 
 def a_function(_, __):
-    """A function with two args """
+    """A function with two args"""
 
 
 partial_function = functools.partial(a_function, arg_x=1)
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index 875f3e8..016905d 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -3437,7 +3437,7 @@ class TestSchedulerJob(unittest.TestCase):
         assert detected_files == expected_files
 
     def test_adopt_or_reset_orphaned_tasks_nothing(self):
-        """Try with nothing. """
+        """Try with nothing."""
         self.scheduler_job = SchedulerJob()
         session = settings.Session()
         assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session)
@@ -3497,7 +3497,7 @@ class TestSchedulerJob(unittest.TestCase):
         session.rollback()
 
     def test_reset_orphaned_tasks_nonexistent_dagrun(self):
-        """Make sure a task in an orphaned state is not reset if it has no dagrun. """
+        """Make sure a task in an orphaned state is not reset if it has no dagrun."""
         dag_id = 'test_reset_orphaned_tasks_nonexistent_dagrun'
         dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
         task_id = dag_id + '_task'
diff --git a/tests/operators/test_sql.py b/tests/operators/test_sql.py
index 099e0e6..623b667 100644
--- a/tests/operators/test_sql.py
+++ b/tests/operators/test_sql.py
@@ -395,7 +395,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
             session.query(TI).delete()
 
     def test_unsupported_conn_type(self):
-        """Check if BranchSQLOperator throws an exception for unsupported connection type """
+        """Check if BranchSQLOperator throws an exception for unsupported connection type"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="redis_default",
@@ -409,7 +409,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
             op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
     def test_invalid_conn(self):
-        """Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -423,7 +423,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
             op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
     def test_invalid_follow_task_true(self):
-        """Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -437,7 +437,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
             op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
     def test_invalid_follow_task_false(self):
-        """Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection"""
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -452,7 +452,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @pytest.mark.backend("mysql")
     def test_sql_branch_operator_mysql(self):
-        """Check if BranchSQLOperator works with backend """
+        """Check if BranchSQLOperator works with backend"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -465,7 +465,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @pytest.mark.backend("postgres")
     def test_sql_branch_operator_postgres(self):
-        """Check if BranchSQLOperator works with backend """
+        """Check if BranchSQLOperator works with backend"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="postgres_default",
@@ -478,7 +478,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
     def test_branch_single_value_with_dag_run(self, mock_get_db_hook):
-        """Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -518,7 +518,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
     def test_branch_true_with_dag_run(self, mock_get_db_hook):
-        """Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -559,7 +559,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
     def test_branch_false_with_dag_run(self, mock_get_db_hook):
-        """Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -642,7 +642,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
     def test_invalid_query_result_with_dag_run(self, mock_get_db_hook):
-        """Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -672,7 +672,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseSQLOperator.get_db_hook")
     def test_with_skip_in_branch_downstream_dependencies(self, mock_get_db_hook):
-        """Test SQL Branch with skipping all downstream dependencies """
+        """Test SQL Branch with skipping all downstream dependencies"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
diff --git a/tests/operators/test_weekday.py b/tests/operators/test_weekday.py
index 35e1ed4..1a16518 100644
--- a/tests/operators/test_weekday.py
+++ b/tests/operators/test_weekday.py
@@ -128,7 +128,7 @@ class TestBranchDayOfWeekOperator(unittest.TestCase):
 
     @freeze_time("2021-01-25")  # Monday
     def test_branch_follow_true_with_execution_date(self):
-        """Checks if BranchDayOfWeekOperator follows true branch when set use_task_execution_day """
+        """Checks if BranchDayOfWeekOperator follows true branch when set use_task_execution_day"""
 
         branch_op = BranchDayOfWeekOperator(
             task_id="make_choice",
diff --git a/tests/providers/jenkins/hooks/test_jenkins.py b/tests/providers/jenkins/hooks/test_jenkins.py
index 71fb45c..b16f946 100644
--- a/tests/providers/jenkins/hooks/test_jenkins.py
+++ b/tests/providers/jenkins/hooks/test_jenkins.py
@@ -25,7 +25,7 @@ from airflow.providers.jenkins.hooks.jenkins import JenkinsHook
 class TestJenkinsHook(unittest.TestCase):
     @mock.patch('airflow.hooks.base.BaseHook.get_connection')
     def test_client_created_default_http(self, get_connection_mock):
-        """tests `init` method to validate http client creation when all parameters are passed """
+        """tests `init` method to validate http client creation when all parameters are passed"""
         default_connection_id = 'jenkins_default'
 
         connection_host = 'http://test.com'
diff --git a/tests/providers/slack/hooks/test_slack.py b/tests/providers/slack/hooks/test_slack.py
index 0998aac..5a23594 100644
--- a/tests/providers/slack/hooks/test_slack.py
+++ b/tests/providers/slack/hooks/test_slack.py
@@ -28,7 +28,7 @@ from airflow.providers.slack.hooks.slack import SlackHook
 
 class TestSlackHook(unittest.TestCase):
     def test_get_token_with_token_only(self):
-        """tests `__get_token` method when only token is provided """
+        """tests `__get_token` method when only token is provided"""
         # Given
         test_token = 'test_token'
         test_conn_id = None
@@ -44,7 +44,7 @@ class TestSlackHook(unittest.TestCase):
     @mock.patch('airflow.providers.slack.hooks.slack.WebClient')
     @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
     def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock_slack_client):
-        """tests `__get_token` method when only connection is provided """
+        """tests `__get_token` method when only connection is provided"""
         # Given
         test_token = None
         test_conn_id = 'x'
@@ -64,7 +64,7 @@ class TestSlackHook(unittest.TestCase):
 
     @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
     def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock):
-        """tests `__get_token` method when only connection is provided """
+        """tests `__get_token` method when only connection is provided"""
 
         # Mock
         conn = mock.Mock()
@@ -77,7 +77,7 @@ class TestSlackHook(unittest.TestCase):
 
     @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
     def test_get_token_with_empty_password_slack_conn_id_only(self, get_connection_mock):
-        """tests `__get_token` method when only connection is provided """
+        """tests `__get_token` method when only connection is provided"""
 
         # Mock
         get_connection_mock.return_value = mock.Mock(password=None)
@@ -87,7 +87,7 @@ class TestSlackHook(unittest.TestCase):
             SlackHook(token=None, slack_conn_id='x')
 
     def test_get_token_with_token_and_slack_conn_id(self):
-        """tests `__get_token` method when both arguments are provided """
+        """tests `__get_token` method when both arguments are provided"""
         # Given
         test_token = 'test_token'
         test_conn_id = 'x'
@@ -101,7 +101,7 @@ class TestSlackHook(unittest.TestCase):
         assert output == expected
 
     def test_get_token_with_out_token_nor_slack_conn_id(self):
-        """tests `__get_token` method when no arguments are provided """
+        """tests `__get_token` method when no arguments are provided"""
 
         with pytest.raises(AirflowException):
             SlackHook(token=None, slack_conn_id=None)
diff --git a/tests/providers/yandex/hooks/test_yandex.py b/tests/providers/yandex/hooks/test_yandex.py
index 469c16b..b4ddf0e 100644
--- a/tests/providers/yandex/hooks/test_yandex.py
+++ b/tests/providers/yandex/hooks/test_yandex.py
@@ -29,7 +29,7 @@ class TestYandexHook(unittest.TestCase):
     @mock.patch('airflow.hooks.base.BaseHook.get_connection')
     @mock.patch('airflow.providers.yandex.hooks.yandex.YandexCloudBaseHook._get_credentials')
     def test_client_created_without_exceptions(self, get_credentials_mock, get_connection_mock):
-        """tests `init` method to validate client creation when all parameters are passed """
+        """tests `init` method to validate client creation when all parameters are passed"""
 
         # Inputs to constructor
         default_folder_id = 'test_id'
diff --git a/tests/utils/test_task_group.py b/tests/utils/test_task_group.py
index ceefdbf..b21a9d7 100644
--- a/tests/utils/test_task_group.py
+++ b/tests/utils/test_task_group.py
@@ -598,7 +598,7 @@ def test_build_task_group_deco_context_manager():
     # Creating Tasks
     @task
     def task_start():
-        """Dummy Task which is First Task of Dag """
+        """Dummy Task which is First Task of Dag"""
         return '[Task_start]'
 
     @task
@@ -608,32 +608,32 @@ def test_build_task_group_deco_context_manager():
 
     @task
     def task_1(value):
-        """ Dummy Task1"""
+        """Dummy Task1"""
         return f'[ Task1 {value} ]'
 
     @task
     def task_2(value):
-        """ Dummy Task2"""
+        """Dummy Task2"""
         print(f'[ Task2 {value} ]')
 
     @task
     def task_3(value):
-        """ Dummy Task3"""
+        """Dummy Task3"""
         return f'[ Task3 {value} ]'
 
     @task
     def task_4(value):
-        """ Dummy Task3"""
+        """Dummy Task3"""
         print(f'[ Task4 {value} ]')
 
     # Creating TaskGroups
     @task_group_decorator
     def section_1(value):
-        """ TaskGroup for grouping related Tasks"""
+        """TaskGroup for grouping related Tasks"""
 
         @task_group_decorator()
         def section_2(value2):
-            """ TaskGroup for grouping related Tasks"""
+            """TaskGroup for grouping related Tasks"""
             return task_4(task_3(value2))
 
         op1 = task_2(task_1(value))
@@ -688,12 +688,12 @@ def test_build_task_group_deco_context_manager():
 
 
 def test_build_task_group_with_operators():
-    """  Tests DAG with Tasks created with *Operators and TaskGroup created with taskgroup decorator """
+    """Tests DAG with Tasks created with *Operators and TaskGroup created with taskgroup decorator"""
 
     from airflow.decorators import task
 
     def task_start():
-        """Dummy Task which is First Task of Dag """
+        """Dummy Task which is First Task of Dag"""
         return '[Task_start]'
 
     def task_end():
@@ -703,23 +703,23 @@ def test_build_task_group_with_operators():
     # Creating Tasks
     @task
     def task_1(value):
-        """ Dummy Task1"""
+        """Dummy Task1"""
         return f'[ Task1 {value} ]'
 
     @task
     def task_2(value):
-        """ Dummy Task2"""
+        """Dummy Task2"""
         return f'[ Task2 {value} ]'
 
     @task
     def task_3(value):
-        """ Dummy Task3"""
+        """Dummy Task3"""
         print(f'[ Task3 {value} ]')
 
     # Creating TaskGroups
     @task_group_decorator(group_id='section_1')
     def section_a(value):
-        """ TaskGroup for grouping related Tasks"""
+        """TaskGroup for grouping related Tasks"""
         return task_3(task_2(task_1(value)))
 
     execution_date = pendulum.parse("20201109")
@@ -743,12 +743,12 @@ def test_build_task_group_with_operators():
 
 
 def test_task_group_context_mix():
-    """ Test cases to check nested TaskGroup context manager with taskgroup decorator"""
+    """Test cases to check nested TaskGroup context manager with taskgroup decorator"""
 
     from airflow.decorators import task
 
     def task_start():
-        """Dummy Task which is First Task of Dag """
+        """Dummy Task which is First Task of Dag"""
         return '[Task_start]'
 
     def task_end():
@@ -758,23 +758,23 @@ def test_task_group_context_mix():
     # Creating Tasks
     @task
     def task_1(value):
-        """ Dummy Task1"""
+        """Dummy Task1"""
         return f'[ Task1 {value} ]'
 
     @task
     def task_2(value):
-        """ Dummy Task2"""
+        """Dummy Task2"""
         return f'[ Task2 {value} ]'
 
     @task
     def task_3(value):
-        """ Dummy Task3"""
+        """Dummy Task3"""
         print(f'[ Task3 {value} ]')
 
     # Creating TaskGroups
     @task_group_decorator
     def section_2(value):
-        """ TaskGroup for grouping related Tasks"""
+        """TaskGroup for grouping related Tasks"""
         return task_3(task_2(task_1(value)))
 
     execution_date = pendulum.parse("20201109")
@@ -824,13 +824,13 @@ def test_task_group_context_mix():
 
 
 def test_duplicate_task_group_id():
-    """ Testing automatic suffix assignment for duplicate group_id"""
+    """Testing automatic suffix assignment for duplicate group_id"""
 
     from airflow.decorators import task
 
     @task(task_id='start_task')
     def task_start():
-        """Dummy Task which is First Task of Dag """
+        """Dummy Task which is First Task of Dag"""
         print('[Task_start]')
 
     @task(task_id='end_task')
@@ -841,17 +841,17 @@ def test_duplicate_task_group_id():
     # Creating Tasks
     @task(task_id='task')
     def task_1():
-        """ Dummy Task1"""
+        """Dummy Task1"""
         print('[Task1]')
 
     @task(task_id='task')
     def task_2():
-        """ Dummy Task2"""
+        """Dummy Task2"""
         print('[Task2]')
 
     @task(task_id='task1')
     def task_3():
-        """ Dummy Task3"""
+        """Dummy Task3"""
         print('[Task3]')
 
     @task_group_decorator('task_group1')
@@ -898,7 +898,7 @@ def test_call_taskgroup_twice():
 
     @task(task_id='start_task')
     def task_start():
-        """Dummy Task which is First Task of Dag """
+        """Dummy Task which is First Task of Dag"""
         print('[Task_start]')
 
     @task(task_id='end_task')
@@ -909,7 +909,7 @@ def test_call_taskgroup_twice():
     # Creating Tasks
     @task(task_id='task')
     def task_1():
-        """ Dummy Task1"""
+        """Dummy Task1"""
         print('[Task1]')
 
     @task_group_decorator