Posted to commits@airflow.apache.org by je...@apache.org on 2022/07/01 18:12:23 UTC

[airflow] branch main updated: Align Black and blacken-docs configs (#24785)

This is an automated email from the ASF dual-hosted git repository.

jedcunningham pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 46ac083f7e Align Black and blacken-docs configs (#24785)
46ac083f7e is described below

commit 46ac083f7e92cf8af046c8b5741e7c26ebffc05e
Author: Bas Harenslak <Ba...@users.noreply.github.com>
AuthorDate: Fri Jul 1 20:12:15 2022 +0200

    Align Black and blacken-docs configs (#24785)
---
 .pre-commit-config.yaml                            |  7 ++++++
 RELEASE_NOTES.rst                                  |  4 +---
 .../providers/amazon/aws/hooks/batch_waiters.py    |  8 ++-----
 airflow/providers/google/CHANGELOG.rst             |  3 +--
 airflow/providers/oracle/hooks/oracle.py           |  7 +-----
 .../connections/aws.rst                            |  8 ++-----
 docs/apache-airflow/best-practices.rst             |  8 ++-----
 docs/apache-airflow/concepts/dags.rst              |  4 +---
 .../concepts/dynamic-task-mapping.rst              | 12 +++-------
 docs/apache-airflow/executor/kubernetes.rst        |  8 ++-----
 docs/apache-airflow/howto/connection.rst           | 16 ++++---------
 docs/apache-airflow/howto/custom-operator.rst      |  4 +---
 docs/apache-airflow/howto/customize-ui.rst         |  4 +---
 docs/apache-airflow/howto/define_extra_link.rst    | 10 ++++----
 docs/apache-airflow/lineage.rst                    |  8 ++-----
 docs/apache-airflow/plugins.rst                    |  4 +---
 docs/apache-airflow/security/webserver.rst         |  8 ++-----
 docs/apache-airflow/upgrading-from-1-10/index.rst  | 28 ++++++----------------
 tests/dags_corrupted/README.md                     |  4 +---
 tests/test_utils/perf/perf_kit/__init__.py         | 10 ++------
 20 files changed, 47 insertions(+), 118 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5f6d3ffb86..1548fe07e5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -160,6 +160,13 @@ repos:
     hooks:
       - id: blacken-docs
         name: Run black on python code blocks in documentation files
+        args:
+          - --line-length=110
+          - --target-version=py37
+          - --target-version=py38
+          - --target-version=py39
+          - --target-version=py310
+          - --skip-string-normalization
         alias: black
         additional_dependencies: [black==22.3.0]
   - repo: https://github.com/pre-commit/pre-commit-hooks
diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst
index 60c4a86d2d..6f0a222704 100644
--- a/RELEASE_NOTES.rst
+++ b/RELEASE_NOTES.rst
@@ -6874,9 +6874,7 @@ New signature:
 
 .. code-block:: python
 
-   def wait_for_transfer_job(
-       self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,)
-   ):
+   def wait_for_transfer_job(self, job, expected_statuses=(GcpTransferOperationStatus.SUCCESS,)):
        ...
 
 The behavior of ``wait_for_transfer_job`` has changed:
diff --git a/airflow/providers/amazon/aws/hooks/batch_waiters.py b/airflow/providers/amazon/aws/hooks/batch_waiters.py
index 59ba0e431f..3d1cd9d4b4 100644
--- a/airflow/providers/amazon/aws/hooks/batch_waiters.py
+++ b/airflow/providers/amazon/aws/hooks/batch_waiters.py
@@ -71,16 +71,12 @@ class BatchWaitersHook(BatchClientHook):
         # and the details of the config on that waiter can be further modified without any
         # accidental impact on the generation of new waiters from the defined waiter_model, e.g.
         waiters.get_waiter("JobExists").config.delay  # -> 5
-        waiter = waiters.get_waiter(
-            "JobExists"
-        )  # -> botocore.waiter.Batch.Waiter.JobExists object
+        waiter = waiters.get_waiter("JobExists")  # -> botocore.waiter.Batch.Waiter.JobExists object
         waiter.config.delay = 10
         waiters.get_waiter("JobExists").config.delay  # -> 5 as defined by waiter_model
 
         # To use a specific waiter, update the config and call the `wait()` method for jobId, e.g.
-        waiter = waiters.get_waiter(
-            "JobExists"
-        )  # -> botocore.waiter.Batch.Waiter.JobExists object
+        waiter = waiters.get_waiter("JobExists")  # -> botocore.waiter.Batch.Waiter.JobExists object
         waiter.config.delay = random.uniform(1, 10)  # seconds
         waiter.config.max_attempts = 10
         waiter.wait(jobs=[jobId])
diff --git a/airflow/providers/google/CHANGELOG.rst b/airflow/providers/google/CHANGELOG.rst
index cf0b4e6f69..fb23866292 100644
--- a/airflow/providers/google/CHANGELOG.rst
+++ b/airflow/providers/google/CHANGELOG.rst
@@ -897,8 +897,7 @@ now the snake_case convention is used.
     set_acl_permission = GCSBucketCreateAclEntryOperator(
         task_id="gcs-set-acl-permission",
         bucket=BUCKET_NAME,
-        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
-        ".split(':', 2)[1] }}",
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity'].split(':', 2)[1] }}",
         role="OWNER",
     )
 
diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py
index 57b5bead41..eb94b435e8 100644
--- a/airflow/providers/oracle/hooks/oracle.py
+++ b/airflow/providers/oracle/hooks/oracle.py
@@ -77,12 +77,7 @@ class OracleHook(DbApiHook):
 
         .. code-block:: python
 
-           {
-               "dsn": (
-                   "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)"
-                   "(HOST=host)(PORT=1521))(CONNECT_DATA=(SID=sid)))"
-               )
-           }
+           {"dsn": ("(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=host)(PORT=1521))(CONNECT_DATA=(SID=sid)))")}
 
         see more param detail in `oracledb.connect
         <https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.connect>`_
diff --git a/docs/apache-airflow-providers-amazon/connections/aws.rst b/docs/apache-airflow-providers-amazon/connections/aws.rst
index 59692c664b..144eeb270b 100644
--- a/docs/apache-airflow-providers-amazon/connections/aws.rst
+++ b/docs/apache-airflow-providers-amazon/connections/aws.rst
@@ -313,17 +313,13 @@ Example
         def federated(self):
             return "federation" in self.extra_config
 
-        def _create_basic_session(
-            self, session_kwargs: Dict[str, Any]
-        ) -> boto3.session.Session:
+        def _create_basic_session(self, session_kwargs: Dict[str, Any]) -> boto3.session.Session:
             if self.federated:
                 return self._create_federated_session(session_kwargs)
             else:
                 return super()._create_basic_session(session_kwargs)
 
-        def _create_federated_session(
-            self, session_kwargs: Dict[str, Any]
-        ) -> boto3.session.Session:
+        def _create_federated_session(self, session_kwargs: Dict[str, Any]) -> boto3.session.Session:
             username = self.extra_config["federation"]["username"]
             region_name = self._get_region_name()
             self.log.debug(
diff --git a/docs/apache-airflow/best-practices.rst b/docs/apache-airflow/best-practices.rst
index aadeccfeac..f4acc7006c 100644
--- a/docs/apache-airflow/best-practices.rst
+++ b/docs/apache-airflow/best-practices.rst
@@ -292,12 +292,8 @@ It's easier to grab the concept with an example. Let's say that we have the foll
         start_date=datetime(2021, 1, 1),
         catchup=False,
     ) as dag:
-        failing_task = BashOperator(
-            task_id="failing_task", bash_command="exit 1", retries=0
-        )
-        passing_task = BashOperator(
-            task_id="passing_task", bash_command="echo passing_task"
-        )
+        failing_task = BashOperator(task_id="failing_task", bash_command="exit 1", retries=0)
+        passing_task = BashOperator(task_id="passing_task", bash_command="echo passing_task")
         teardown = BashOperator(
             task_id="teardown",
             bash_command="echo teardown",
diff --git a/docs/apache-airflow/concepts/dags.rst b/docs/apache-airflow/concepts/dags.rst
index 3f1d9cc64b..90473d8db7 100644
--- a/docs/apache-airflow/concepts/dags.rst
+++ b/docs/apache-airflow/concepts/dags.rst
@@ -413,9 +413,7 @@ You can also combine this with the :ref:`concepts:depends-on-past` functionality
         )
 
         run_this_first = EmptyOperator(task_id="run_this_first", dag=dag)
-        branching = BranchPythonOperator(
-            task_id="branching", dag=dag, python_callable=lambda: "branch_a"
-        )
+        branching = BranchPythonOperator(task_id="branching", dag=dag, python_callable=lambda: "branch_a")
 
         branch_a = EmptyOperator(task_id="branch_a", dag=dag)
         follow_branch_a = EmptyOperator(task_id="follow_branch_a", dag=dag)
diff --git a/docs/apache-airflow/concepts/dynamic-task-mapping.rst b/docs/apache-airflow/concepts/dynamic-task-mapping.rst
index 76587a77f3..30622b024c 100644
--- a/docs/apache-airflow/concepts/dynamic-task-mapping.rst
+++ b/docs/apache-airflow/concepts/dynamic-task-mapping.rst
@@ -173,9 +173,7 @@ It is possible to use ``partial`` and ``expand`` with classic style operators as
 
 .. code-block:: python
 
-    BashOperator.partial(task_id="bash", do_xcom_push=False).expand(
-        bash_command=["echo 1", "echo 2"]
-    )
+    BashOperator.partial(task_id="bash", do_xcom_push=False).expand(bash_command=["echo 1", "echo 2"])
 
 .. note:: Only keyword arguments are allowed to be passed to ``partial()``.
 
@@ -224,9 +222,7 @@ In this example you have a regular data delivery to an S3 bucket and want to app
         def total(lines):
             return sum(lines)
 
-        counts = count_lines.partial(aws_conn_id="aws_default", bucket=files.bucket).expand(
-            file=XComArg(files)
-        )
+        counts = count_lines.partial(aws_conn_id="aws_default", bucket=files.bucket).expand(file=XComArg(files))
         total(lines=counts)
 
 What data types can be expanded?
@@ -299,9 +295,7 @@ There are two limits that you can place on a task:
           return x + 1
 
 
-      BashOperator.partial(task_id="my_task", max_active_tis_per_dag=16).expand(
-          bash_command=commands
-      )
+      BashOperator.partial(task_id="my_task", max_active_tis_per_dag=16).expand(bash_command=commands)
 
 Automatically skipping zero-length maps
 =======================================
diff --git a/docs/apache-airflow/executor/kubernetes.rst b/docs/apache-airflow/executor/kubernetes.rst
index a59fe5c0e4..3dcd388261 100644
--- a/docs/apache-airflow/executor/kubernetes.rst
+++ b/docs/apache-airflow/executor/kubernetes.rst
@@ -172,12 +172,8 @@ Here is an example of a task with both features:
         tags=["example3"],
     ) as dag:
         executor_config_template = {
-            "pod_template_file": os.path.join(
-                AIRFLOW_HOME, "pod_templates/basic_template.yaml"
-            ),
-            "pod_override": k8s.V1Pod(
-                metadata=k8s.V1ObjectMeta(labels={"release": "stable"})
-            ),
+            "pod_template_file": os.path.join(AIRFLOW_HOME, "pod_templates/basic_template.yaml"),
+            "pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(labels={"release": "stable"})),
         }
 
         @task(executor_config=executor_config_template)
diff --git a/docs/apache-airflow/howto/connection.rst b/docs/apache-airflow/howto/connection.rst
index 6171561baa..8c67429547 100644
--- a/docs/apache-airflow/howto/connection.rst
+++ b/docs/apache-airflow/howto/connection.rst
@@ -250,9 +250,7 @@ Here's an example:
         from wtforms import StringField
 
         return {
-            "workspace": StringField(
-                lazy_gettext("Workspace"), widget=BS3TextFieldWidget()
-            ),
+            "workspace": StringField(lazy_gettext("Workspace"), widget=BS3TextFieldWidget()),
             "project": StringField(lazy_gettext("Project"), widget=BS3TextFieldWidget()),
         }
 
@@ -413,9 +411,7 @@ You can verify a URI is parsed correctly like so:
 
     >>> from airflow.models.connection import Connection
 
-    >>> c = Connection(
-    ...     uri="my-conn-type://my-login:my-password@my-host:5432/my-schema?param1=val1&param2=val2"
-    ... )
+    >>> c = Connection(uri="my-conn-type://my-login:my-password@my-host:5432/my-schema?param1=val1&param2=val2")
     >>> print(c.login)
     my-login
     >>> print(c.password)
@@ -437,17 +433,13 @@ For example if your password has a ``/``, this fails:
 
 .. code-block:: pycon
 
-    >>> c = Connection(
-    ...     uri="my-conn-type://my-login:my-pa/ssword@my-host:5432/my-schema?param1=val1&param2=val2"
-    ... )
+    >>> c = Connection(uri="my-conn-type://my-login:my-pa/ssword@my-host:5432/my-schema?param1=val1&param2=val2")
     ValueError: invalid literal for int() with base 10: 'my-pa'
 
 To fix this, you can encode with :func:`~urllib.parse.quote_plus`:
 
 .. code-block:: pycon
 
-    >>> c = Connection(
-    ...     uri="my-conn-type://my-login:my-pa%2Fssword@my-host:5432/my-schema?param1=val1&param2=val2"
-    ... )
+    >>> c = Connection(uri="my-conn-type://my-login:my-pa%2Fssword@my-host:5432/my-schema?param1=val1&param2=val2")
     >>> print(c.password)
     my-pa/ssword
diff --git a/docs/apache-airflow/howto/custom-operator.rst b/docs/apache-airflow/howto/custom-operator.rst
index c4b903471a..1fe8b3e2da 100644
--- a/docs/apache-airflow/howto/custom-operator.rst
+++ b/docs/apache-airflow/howto/custom-operator.rst
@@ -169,9 +169,7 @@ You can use the template as follows:
 .. code-block:: python
 
         with dag:
-            hello_task = HelloOperator(
-                task_id="task_id_1", dag=dag, name="{{ task_instance.task_id }}"
-            )
+            hello_task = HelloOperator(task_id="task_id_1", dag=dag, name="{{ task_instance.task_id }}")
 
 In this example, Jinja looks for the ``name`` parameter and substitutes ``{{ task_instance.task_id }}`` with
 ``task_id_1``.
diff --git a/docs/apache-airflow/howto/customize-ui.rst b/docs/apache-airflow/howto/customize-ui.rst
index f11a874146..2258b588a1 100644
--- a/docs/apache-airflow/howto/customize-ui.rst
+++ b/docs/apache-airflow/howto/customize-ui.rst
@@ -166,8 +166,6 @@ information, see `String Formatting in the MarkupSafe docs <https://markupsafe.p
     .. code-block:: python
 
       DASHBOARD_UIALERTS = [
-          UIAlert(
-              'Visit <a href="https://airflow.apache.org">airflow.apache.org</a>', html=True
-          ),
+          UIAlert('Visit <a href="https://airflow.apache.org">airflow.apache.org</a>', html=True),
           UIAlert(Markup("Welcome <em>%s</em>") % ("John & Jane Doe",)),
       ]
diff --git a/docs/apache-airflow/howto/define_extra_link.rst b/docs/apache-airflow/howto/define_extra_link.rst
index c793a9320b..9bae547492 100644
--- a/docs/apache-airflow/howto/define_extra_link.rst
+++ b/docs/apache-airflow/howto/define_extra_link.rst
@@ -93,12 +93,10 @@ tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Ope
       operators = [GCSToS3Operator]
 
       def get_link(self, operator, *, ti_key):
-          return (
-              "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format(
-                  dag_id=operator.dag_id,
-                  task_id=operator.task_id,
-                  run_id=ti_key.run_id,
-              )
+          return "https://s3.amazonaws.com/airflow-logs/{dag_id}/{task_id}/{run_id}".format(
+              dag_id=operator.dag_id,
+              task_id=operator.task_id,
+              run_id=ti_key.run_id,
           )
 
 
diff --git a/docs/apache-airflow/lineage.rst b/docs/apache-airflow/lineage.rst
index 20adfb96fa..8029d94cee 100644
--- a/docs/apache-airflow/lineage.rst
+++ b/docs/apache-airflow/lineage.rst
@@ -50,9 +50,7 @@ works.
     )
 
     f_final = File(url="/tmp/final")
-    run_this_last = EmptyOperator(
-        task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final
-    )
+    run_this_last = EmptyOperator(task_id="run_this_last", dag=dag, inlets=AUTO, outlets=f_final)
 
     f_in = File(url="/tmp/whole_directory/")
     outlets = []
@@ -60,9 +58,7 @@ works.
         f_out = File(url="/tmp/{}/{{{{ data_interval_start }}}}".format(file))
         outlets.append(f_out)
 
-    run_this = BashOperator(
-        task_id="run_me_first", bash_command="echo 1", dag=dag, inlets=f_in, outlets=outlets
-    )
+    run_this = BashOperator(task_id="run_me_first", bash_command="echo 1", dag=dag, inlets=f_in, outlets=outlets)
     run_this.set_downstream(run_this_last)
 
 Inlets can be a (list of) upstream task ids or statically defined as an attr annotated object
diff --git a/docs/apache-airflow/plugins.rst b/docs/apache-airflow/plugins.rst
index f719cbae23..ae011caa12 100644
--- a/docs/apache-airflow/plugins.rst
+++ b/docs/apache-airflow/plugins.rst
@@ -302,9 +302,7 @@ will automatically load the registered plugins from the entrypoint list.
     setup(
         name="my-package",
         # ...
-        entry_points={
-            "airflow.plugins": ["my_plugin = my_package.my_plugin:MyAirflowPlugin"]
-        },
+        entry_points={"airflow.plugins": ["my_plugin = my_package.my_plugin:MyAirflowPlugin"]},
     )
 
 Automatic reloading webserver
diff --git a/docs/apache-airflow/security/webserver.rst b/docs/apache-airflow/security/webserver.rst
index cee3ed9e3b..fbbf938df0 100644
--- a/docs/apache-airflow/security/webserver.rst
+++ b/docs/apache-airflow/security/webserver.rst
@@ -150,9 +150,7 @@ Here is an example of what you might have in your webserver_config.py:
 
     AUTH_TYPE = AUTH_OAUTH
     AUTH_ROLES_SYNC_AT_LOGIN = True  # Checks roles on every login
-    AUTH_USER_REGISTRATION = (
-        True  # allow users who are not already in the FAB DB to register
-    )
+    AUTH_USER_REGISTRATION = True  # allow users who are not already in the FAB DB to register
     # Make sure to replace this with the path to your security manager class
     FAB_SECURITY_MANAGER_CLASS = "your_module.your_security_manager_class"
     AUTH_ROLES_MAPPING = {
@@ -219,9 +217,7 @@ webserver_config.py itself if you wish.
         # In this example, the oauth provider == 'github'.
         # If you ever want to support other providers, see how it is done here:
         # https://github.com/dpgaspar/Flask-AppBuilder/blob/master/flask_appbuilder/security/manager.py#L550
-        def get_oauth_user_info(
-            self, provider: str, resp: Any
-        ) -> Dict[str, Union[str, List[str]]]:
+        def get_oauth_user_info(self, provider: str, resp: Any) -> Dict[str, Union[str, List[str]]]:
 
             # Creates the user info payload from Github.
             # The user previously allowed your app to act on their behalf,
diff --git a/docs/apache-airflow/upgrading-from-1-10/index.rst b/docs/apache-airflow/upgrading-from-1-10/index.rst
index 334753e52b..dd58b2f4f4 100644
--- a/docs/apache-airflow/upgrading-from-1-10/index.rst
+++ b/docs/apache-airflow/upgrading-from-1-10/index.rst
@@ -182,9 +182,7 @@ Whereas previously a user would import each individual class to build the pod as
 
     volume_config = {"persistentVolumeClaim": {"claimName": "test-volume"}}
     volume = Volume(name="test-volume", configs=volume_config)
-    volume_mount = VolumeMount(
-        "test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True
-    )
+    volume_mount = VolumeMount("test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True)
 
     port = Port("http", 80)
     secret_file = Secret("volume", "/etc/sql_conn", "airflow-secrets", "sql_alchemy_conn")
@@ -224,9 +222,7 @@ Now the user can use the ``kubernetes.client.models`` class as a single point of
 
     volume = k8s.V1Volume(
         name="test-volume",
-        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(
-            claim_name="test-volume"
-        ),
+        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(claim_name="test-volume"),
     )
 
     port = k8s.V1ContainerPort(name="http", container_port=80)
@@ -602,9 +598,7 @@ Before:
 
     from airflow.kubernetes.volume_mount import VolumeMount
 
-    volume_mount = VolumeMount(
-        "test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True
-    )
+    volume_mount = VolumeMount("test-volume", mount_path="/root/mount_file", sub_path=None, read_only=True)
     k = KubernetesPodOperator(
         namespace="default",
         image="ubuntu:16.04",
@@ -660,9 +654,7 @@ After:
 
     volume = k8s.V1Volume(
         name="test-volume",
-        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(
-            claim_name="test-volume"
-        ),
+        persistent_volume_claim=k8s.V1PersistentVolumeClaimVolumeSource(claim_name="test-volume"),
     )
     k = KubernetesPodOperator(
         namespace="default",
@@ -737,9 +729,7 @@ After:
     env_vars = [
         k8s.V1EnvVar(
             name="ENV3",
-            value_from=k8s.V1EnvVarSource(
-                field_ref=k8s.V1ObjectFieldSelector(field_path="status.podIP")
-            ),
+            value_from=k8s.V1EnvVarSource(field_ref=k8s.V1ObjectFieldSelector(field_path="status.podIP")),
         )
     ]
 
@@ -777,9 +767,7 @@ After:
     from kubernetes.client import models as k8s
 
     configmap = "test-configmap"
-    env_from = [
-        k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap))
-    ]
+    env_from = [k8s.V1EnvFromSource(config_map_ref=k8s.V1ConfigMapEnvSource(name=configmap))]
 
     k = KubernetesPodOperator(
         namespace="default",
@@ -1149,9 +1137,7 @@ non-RBAC UI (``flask-admin`` based UI), update it to use ``flask_appbuilder_view
 
     v = TestView(category="Test Plugin", name="Test View")
 
-    ml = MenuLink(
-        category="Test Plugin", name="Test Menu Link", url="https://airflow.apache.org/"
-    )
+    ml = MenuLink(category="Test Plugin", name="Test Menu Link", url="https://airflow.apache.org/")
 
 
     class AirflowTestPlugin(AirflowPlugin):
diff --git a/tests/dags_corrupted/README.md b/tests/dags_corrupted/README.md
index 5c3a0278de..d91e568a61 100644
--- a/tests/dags_corrupted/README.md
+++ b/tests/dags_corrupted/README.md
@@ -25,9 +25,7 @@ Python interpreter from loading this file.
 To access a DAG in this folder, use the following code inside a unit test.
 
 ```python
-TEST_DAG_FOLDER = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)), "dags_corrupted"
-)
+TEST_DAG_FOLDER = os.path.join(os.path.dirname(os.path.realpath(__file__)), "dags_corrupted")
 
 dagbag = DagBag(dag_folder=TEST_DAG_FOLDER)
 dag = dagbag.get_dag(dag_id)
diff --git a/tests/test_utils/perf/perf_kit/__init__.py b/tests/test_utils/perf/perf_kit/__init__.py
index f9b0fe23ef..13252f4569 100644
--- a/tests/test_utils/perf/perf_kit/__init__.py
+++ b/tests/test_utils/perf/perf_kit/__init__.py
@@ -75,10 +75,7 @@ Suppose we have the following fragment of the file with tests.
 
     def test_bulk_write_to_db(self):
         clear_db_dags()
-        dags = [
-            DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"])
-            for i in range(0, 4)
-        ]
+        dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(0, 4)]
 
         with assert_queries_count(3):
             DAG.bulk_write_to_db(dags)
@@ -101,10 +98,7 @@ queries in it.
     @trace_queries
     def test_bulk_write_to_db(self):
         clear_db_dags()
-        dags = [
-            DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"])
-            for i in range(0, 4)
-        ]
+        dags = [DAG(f"dag-bulk-sync-{i}", start_date=DEFAULT_DATE, tags=["test-dag"]) for i in range(0, 4)]
 
         with assert_queries_count(3):
             DAG.bulk_write_to_db(dags)
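
For reference, the args added to the blacken-docs hook above mirror the Black settings Airflow already uses for regular source files, so code blocks inside documentation now format identically to the codebase. Below is a minimal sketch of reproducing that behavior through Black's Python API — the use of ``black.Mode`` here is illustrative and not part of the commit; it assumes ``black==22.3.0`` is installed, the same pin the hook declares via ``additional_dependencies``. The snippet it formats is the one-liner from the ``lineage.rst`` hunk above.

```python
# Illustrative only -- not part of the commit. Assumes black==22.3.0,
# matching the hook's additional_dependencies pin.
import black

# Mirror the args the blacken-docs hook now passes:
#   --line-length=110, --target-version=py37..py310, --skip-string-normalization
MODE = black.Mode(
    line_length=110,
    target_versions={
        black.TargetVersion.PY37,
        black.TargetVersion.PY38,
        black.TargetVersion.PY39,
        black.TargetVersion.PY310,
    },
    string_normalization=False,
)

# This line is 109 characters: it fits within the new 110-column limit and
# stays on one line, whereas Black's default 88 columns would split it across
# several lines -- exactly the churn this commit removes from the docs.
snippet = (
    'run_this = BashOperator(task_id="run_me_first", bash_command="echo 1",'
    " dag=dag, inlets=f_in, outlets=outlets)\n"
)
print(black.format_str(snippet, mode=MODE), end="")
```

Running this prints the snippet unchanged, confirming that the doc examples reflowed in this commit are already stable under the aligned configuration.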