Posted to commits@airflow.apache.org by po...@apache.org on 2021/06/22 20:27:18 UTC

[airflow] branch v2-1-test updated (1c0df06 -> 38301c1)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a change to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git.


    from 1c0df06  Switch to GitHub Container Registry by default (#16586)
     new cfe1473  Fetch Helm Chart inventory from remote cache (#16535)
     new 38301c1  Apply pre-commit fixes

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .pre-commit-config.yaml                              | 14 --------------
 BREEZE.rst                                           | 12 ++++++------
 airflow/cli/commands/role_command.py                 |  2 +-
 airflow/cli/commands/task_command.py                 |  2 +-
 airflow/cli/simple_table.py                          |  2 +-
 airflow/configuration.py                             |  2 +-
 airflow/decorators/base.py                           |  8 +++-----
 airflow/executors/base_executor.py                   |  2 +-
 airflow/executors/celery_executor.py                 |  2 +-
 airflow/executors/debug_executor.py                  |  2 +-
 airflow/jobs/backfill_job.py                         |  2 +-
 airflow/jobs/scheduler_job.py                        |  8 ++++----
 airflow/models/dag.py                                |  6 +++---
 airflow/models/dagbag.py                             |  8 ++++----
 airflow/models/taskinstance.py                       |  2 +-
 airflow/operators/bash.py                            |  2 +-
 airflow/operators/sql.py                             |  2 +-
 .../amazon/aws/log/cloudwatch_task_handler.py        |  2 +-
 .../apache/druid/transfers/hive_to_druid.py          |  2 +-
 airflow/providers/apache/hive/hooks/hive.py          |  8 ++++----
 .../providers/apache/hive/operators/hive_stats.py    |  2 +-
 .../providers/cncf/kubernetes/backcompat/volume.py   |  2 +-
 .../cncf/kubernetes/operators/kubernetes_pod.py      |  2 +-
 .../providers/elasticsearch/log/es_task_handler.py   |  2 +-
 airflow/providers/google/cloud/hooks/dataflow.py     |  2 +-
 airflow/providers/google/cloud/hooks/gdm.py          |  2 +-
 .../providers/microsoft/azure/hooks/azure_batch.py   |  2 +-
 airflow/stats.py                                     |  2 +-
 airflow/utils/process_utils.py                       |  4 ++--
 airflow/utils/strings.py                             |  2 +-
 airflow/utils/task_group.py                          |  8 +++-----
 airflow/www/views.py                                 | 20 ++++++++++----------
 dev/check_files.py                                   |  2 +-
 dev/provider_packages/prepare_provider_packages.py   |  2 +-
 docs/exts/docs_build/docs_builder.py                 |  4 ++--
 docs/exts/docs_build/fetch_inventories.py            | 13 +++++++------
 metastore_browser/hive_metastore.py                  |  4 ++--
 .../in_container/update_quarantined_test_status.py   |  2 +-
 tests/always/test_project_structure.py               |  6 +++---
 tests/core/test_core.py                              |  2 +-
 tests/hooks/test_subprocess.py                       |  2 +-
 tests/jobs/test_backfill_job.py                      |  2 +-
 .../amazon/aws/operators/test_s3_delete_objects.py   |  4 ++--
 tests/providers/apache/hive/hooks/test_hive.py       |  2 +-
 tests/providers/google/cloud/hooks/test_dataflow.py  |  2 +-
 .../google/cloud/operators/test_dataflow_system.py   |  3 +--
 tests/test_utils/logging_command_executor.py         | 10 +++++-----
 tests/test_utils/perf/dags/elastic_dag.py            |  2 +-
 tests/test_utils/perf/scheduler_ops_metrics.py       |  2 +-
 49 files changed, 93 insertions(+), 111 deletions(-)

[airflow] 01/02: Fetch Helm Chart inventory from remote cache (#16535)

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit cfe1473c2ffbc636dc2a6de61c2100a4167e87d0
Author: Kamil Breguła <mi...@users.noreply.github.com>
AuthorDate: Sat Jun 19 03:20:32 2021 +0200

    Fetch Helm Chart inventory from remote cache (#16535)
    
    (cherry picked from commit 609620a39c79dc410943e5fcce0425f6ef32cd3e)
---
 docs/exts/docs_build/fetch_inventories.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/docs/exts/docs_build/fetch_inventories.py b/docs/exts/docs_build/fetch_inventories.py
index 40ef28e..4eb4229 100644
--- a/docs/exts/docs_build/fetch_inventories.py
+++ b/docs/exts/docs_build/fetch_inventories.py
@@ -92,13 +92,14 @@ def fetch_inventories():
                 f'{CACHE_DIR}/{pkg_name}/objects.inv',
             )
         )
-    to_download.append(
-        (
-            "apache-airflow",
-            S3_DOC_URL_VERSIONED.format(package_name='apache-airflow'),
-            f'{CACHE_DIR}/apache-airflow/objects.inv',
+    for pkg_name in ['apache-airflow', 'helm-chart']:
+        to_download.append(
+            (
+                pkg_name,
+                S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
+                f'{CACHE_DIR}/{pkg_name}/objects.inv',
+            )
         )
-    )
     for pkg_name in ['apache-airflow-providers', 'docker-stack']:
         to_download.append(
             (

[airflow] 02/02: Apply pre-commit fixes

Posted by po...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 38301c16ee3acdfd3de6128fae5056b25cc14e4c
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Tue Jun 22 22:11:12 2021 +0200

    Apply pre-commit fixes
---
 .pre-commit-config.yaml                              | 14 --------------
 BREEZE.rst                                           | 12 ++++++------
 airflow/cli/commands/role_command.py                 |  2 +-
 airflow/cli/commands/task_command.py                 |  2 +-
 airflow/cli/simple_table.py                          |  2 +-
 airflow/configuration.py                             |  2 +-
 airflow/decorators/base.py                           |  8 +++-----
 airflow/executors/base_executor.py                   |  2 +-
 airflow/executors/celery_executor.py                 |  2 +-
 airflow/executors/debug_executor.py                  |  2 +-
 airflow/jobs/backfill_job.py                         |  2 +-
 airflow/jobs/scheduler_job.py                        |  8 ++++----
 airflow/models/dag.py                                |  6 +++---
 airflow/models/dagbag.py                             |  8 ++++----
 airflow/models/taskinstance.py                       |  2 +-
 airflow/operators/bash.py                            |  2 +-
 airflow/operators/sql.py                             |  2 +-
 .../amazon/aws/log/cloudwatch_task_handler.py        |  2 +-
 .../apache/druid/transfers/hive_to_druid.py          |  2 +-
 airflow/providers/apache/hive/hooks/hive.py          |  8 ++++----
 .../providers/apache/hive/operators/hive_stats.py    |  2 +-
 .../providers/cncf/kubernetes/backcompat/volume.py   |  2 +-
 .../cncf/kubernetes/operators/kubernetes_pod.py      |  2 +-
 .../providers/elasticsearch/log/es_task_handler.py   |  2 +-
 airflow/providers/google/cloud/hooks/dataflow.py     |  2 +-
 airflow/providers/google/cloud/hooks/gdm.py          |  2 +-
 .../providers/microsoft/azure/hooks/azure_batch.py   |  2 +-
 airflow/stats.py                                     |  2 +-
 airflow/utils/process_utils.py                       |  4 ++--
 airflow/utils/strings.py                             |  2 +-
 airflow/utils/task_group.py                          |  8 +++-----
 airflow/www/views.py                                 | 20 ++++++++++----------
 dev/check_files.py                                   |  2 +-
 dev/provider_packages/prepare_provider_packages.py   |  2 +-
 docs/exts/docs_build/docs_builder.py                 |  4 ++--
 metastore_browser/hive_metastore.py                  |  4 ++--
 .../in_container/update_quarantined_test_status.py   |  2 +-
 tests/always/test_project_structure.py               |  6 +++---
 tests/core/test_core.py                              |  2 +-
 tests/hooks/test_subprocess.py                       |  2 +-
 tests/jobs/test_backfill_job.py                      |  2 +-
 .../amazon/aws/operators/test_s3_delete_objects.py   |  4 ++--
 tests/providers/apache/hive/hooks/test_hive.py       |  2 +-
 tests/providers/google/cloud/hooks/test_dataflow.py  |  2 +-
 .../google/cloud/operators/test_dataflow_system.py   |  3 +--
 tests/test_utils/logging_command_executor.py         | 10 +++++-----
 tests/test_utils/perf/dags/elastic_dag.py            |  2 +-
 tests/test_utils/perf/scheduler_ops_metrics.py       |  2 +-
 48 files changed, 86 insertions(+), 105 deletions(-)

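Nearly every hunk below applies the same mechanical cleanup: a list
comprehension passed directly to a call such as join(), sorted(), sum(), min(),
max() or tuple() is replaced with a generator expression, so no intermediate
list is materialized. A minimal before/after sketch of the pattern (the
variable name and values are illustrative):

    task_ids = ["extract", "transform", "load"]

    # Before: a throwaway list is built just to be joined.
    summary = "\n\t".join([repr(t) for t in task_ids])

    # After: the generator expression feeds join() directly.
    summary = "\n\t".join(repr(t) for t in task_ids)

Note that when the call takes a second positional argument, as in
sum((o.duration for o in stats), timedelta()) in airflow/models/dagbag.py, the
generator expression keeps its own parentheses. The remaining hunks drop the
ui-lint and www-lint hooks from .pre-commit-config.yaml and reflow the
corresponding BREEZE.rst help text.
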
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 262c230..f27aeb0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -609,20 +609,6 @@ repos:
         additional_dependencies: ['flynt==0.63']
         files: \.py$
         exclude: ^airflow/_vendor/
-      - id: ui-lint
-        name: ESLint against airflow/ui
-        language: node
-        'types_or': [javascript, tsx, ts]
-        files: ^airflow/ui/
-        entry: scripts/ci/static_checks/ui_lint.sh
-        pass_filenames: false
-      - id: www-lint
-        name: ESLint against current UI js files
-        language: node
-        'types_or': [javascript]
-        files: ^airflow/www/static/js/
-        entry: scripts/ci/static_checks/www_lint.sh
-        pass_filenames: false
       - id: bats-in-container-tests
         name: Run in container bats tests
         language: system
diff --git a/BREEZE.rst b/BREEZE.rst
index c05a3e7..9b000b3 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -2249,12 +2249,12 @@ This is the current syntax for  `./breeze <./breeze>`_:
                  incorrect-use-of-LoggingMixin insert-license isort json-schema language-matters
                  lint-dockerfile lint-openapi markdownlint mermaid mixed-line-ending mypy mypy-helm
                  no-providers-in-core-examples no-relative-imports pre-commit-descriptions
-                 pre-commit-hook-names provide-create-sessions providers-changelogs providers-init-file provider-yamls
-                 pydevd pydocstyle pylint pylint-tests python-no-log-warn pyupgrade
-                 restrict-start_date rst-backticks setup-order setup-extra-packages shellcheck
-                 sort-in-the-wild sort-spelling-wordlist stylelint trailing-whitespace ui-lint
-                 update-breeze-file update-extras update-local-yml-file update-setup-cfg-file
-                 verify-db-migrations-documented version-sync yamllint
+                 pre-commit-hook-names provide-create-sessions providers-changelogs
+                 providers-init-file provider-yamls pydevd pydocstyle pylint pylint-tests
+                 python-no-log-warn pyupgrade restrict-start_date rst-backticks setup-order
+                 setup-extra-packages shellcheck sort-in-the-wild sort-spelling-wordlist stylelint
+                 trailing-whitespace ui-lint update-breeze-file update-extras update-local-yml-file
+                 update-setup-cfg-file verify-db-migrations-documented version-sync yamllint
 
         You can pass extra arguments including options to the pre-commit framework as
         <EXTRA_ARGS> passed after --. For example:
diff --git a/airflow/cli/commands/role_command.py b/airflow/cli/commands/role_command.py
index b017cd1..0d6ab33 100644
--- a/airflow/cli/commands/role_command.py
+++ b/airflow/cli/commands/role_command.py
@@ -30,7 +30,7 @@ def roles_list(args):
     appbuilder = cached_app().appbuilder  # pylint: disable=no-member
     roles = appbuilder.sm.get_all_roles()
     AirflowConsole().print_as(
-        data=sorted([r.name for r in roles]), output=args.output, mapper=lambda x: {"name": x}
+        data=sorted(r.name for r in roles), output=args.output, mapper=lambda x: {"name": x}
     )
 
 
diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py
index ee34646..c0cfb03 100644
--- a/airflow/cli/commands/task_command.py
+++ b/airflow/cli/commands/task_command.py
@@ -287,7 +287,7 @@ def task_list(args, dag=None):
     if args.tree:
         dag.tree_view()
     else:
-        tasks = sorted([t.task_id for t in dag.tasks])
+        tasks = sorted(t.task_id for t in dag.tasks)
         print("\n".join(tasks))
 
 
diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py
index 515571c..65e846e 100644
--- a/airflow/cli/simple_table.py
+++ b/airflow/cli/simple_table.py
@@ -61,7 +61,7 @@ class AirflowConsole(Console):
             table.add_column(col)
 
         for row in data:
-            table.add_row(*[str(d) for d in row.values()])
+            table.add_row(*(str(d) for d in row.values()))
         self.print(table)
 
     def print_as_plain_table(self, data: List[Dict]):
diff --git a/airflow/configuration.py b/airflow/configuration.py
index c3595d7..263ba4b 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -67,7 +67,7 @@ def run_command(command):
     process = subprocess.Popen(
         shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True
     )
-    output, stderr = [stream.decode(sys.getdefaultencoding(), 'ignore') for stream in process.communicate()]
+    output, stderr = (stream.decode(sys.getdefaultencoding(), 'ignore') for stream in process.communicate())
 
     if process.returncode != 0:
         raise AirflowConfigException(
diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py
index 3307f05..2f17980 100644
--- a/airflow/decorators/base.py
+++ b/airflow/decorators/base.py
@@ -70,11 +70,9 @@ def get_unique_task_id(
         return task_id
     core = re.split(r'__\d+$', task_id)[0]
     suffixes = sorted(
-        [
-            int(re.split(r'^.+__', task_id)[1])
-            for task_id in dag.task_ids
-            if re.match(rf'^{core}__\d+$', task_id)
-        ]
+        int(re.split(r'^.+__', task_id)[1])
+        for task_id in dag.task_ids
+        if re.match(rf'^{core}__\d+$', task_id)
     )
     if not suffixes:
         return f'{core}__1'
diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py
index 8e87901..f657a0d 100644
--- a/airflow/executors/base_executor.py
+++ b/airflow/executors/base_executor.py
@@ -168,7 +168,7 @@ class BaseExecutor(LoggingMixin):
         :return: List of tuples from the queued_tasks according to the priority.
         """
         return sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
+            ((k, v) for k, v in self.queued_tasks.items()),  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True,
         )
diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index 567fe58..4c976cf 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -366,7 +366,7 @@ class CeleryExecutor(BaseExecutor):
                 "Adopted tasks were still pending after %s, assuming they never made it to celery and "
                 "clearing:\n\t%s",
                 self.task_adoption_timeout,
-                "\n\t".join([repr(x) for x in timedout_keys]),
+                "\n\t".join(repr(x) for x in timedout_keys),
             )
             for key in timedout_keys:
                 self.change_state(key, State.FAILED)
diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py
index 25aace0..3bf784c 100644
--- a/airflow/executors/debug_executor.py
+++ b/airflow/executors/debug_executor.py
@@ -120,7 +120,7 @@ class DebugExecutor(BaseExecutor):
         :param open_slots: Number of open slots
         """
         sorted_queue = sorted(
-            [(k, v) for k, v in self.queued_tasks.items()],  # pylint: disable=unnecessary-comprehension
+            ((k, v) for k, v in self.queued_tasks.items()),  # pylint: disable=unnecessary-comprehension
             key=lambda x: x[1][1],
             reverse=True,
         )
diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py
index a16f261..b306bc4 100644
--- a/airflow/jobs/backfill_job.py
+++ b/airflow/jobs/backfill_job.py
@@ -897,7 +897,7 @@ class BackfillJob(BaseJob):
 
         reset_tis = helpers.reduce_in_chunks(query, tis_to_reset, [], self.max_tis_per_query)
 
-        task_instance_str = '\n\t'.join([repr(x) for x in reset_tis])
+        task_instance_str = '\n\t'.join(repr(x) for x in reset_tis)
         session.commit()
 
         self.log.info("Reset the following %s TaskInstances:\n\t%s", len(reset_tis), task_instance_str)
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index 7514d92..1340fb8 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -461,9 +461,9 @@ class DagFileProcessor(LoggingMixin):
                     session.delete(ti)
                     session.commit()
 
-            task_list = "\n".join([sla.task_id + ' on ' + sla.execution_date.isoformat() for sla in slas])
+            task_list = "\n".join(sla.task_id + ' on ' + sla.execution_date.isoformat() for sla in slas)
             blocking_task_list = "\n".join(
-                [ti.task_id + ' on ' + ti.execution_date.isoformat() for ti in blocking_tis]
+                ti.task_id + ' on ' + ti.execution_date.isoformat() for ti in blocking_tis
             )
             # Track whether email or any alert notification sent
             # We consider email or the alert callback as notifications
@@ -960,7 +960,7 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
             return executable_tis
 
         # Put one task instance on each line
-        task_instance_str = "\n\t".join([repr(x) for x in task_instances_to_examine])
+        task_instance_str = "\n\t".join(repr(x) for x in task_instances_to_examine)
         self.log.info("%s tasks up for execution:\n\t%s", len(task_instances_to_examine), task_instance_str)
 
         pool_to_task_instances: DefaultDict[str, List[models.Pool]] = defaultdict(list)
@@ -1082,7 +1082,7 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
         Stats.gauge('scheduler.tasks.running', num_tasks_in_executor)
         Stats.gauge('scheduler.tasks.executable', len(executable_tis))
 
-        task_instance_str = "\n\t".join([repr(x) for x in executable_tis])
+        task_instance_str = "\n\t".join(repr(x) for x in executable_tis)
         self.log.info("Setting the following tasks to queued state:\n\t%s", task_instance_str)
 
         # set TIs to queued state
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 616ae6f..53c3fea 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -638,7 +638,7 @@ class DAG(LoggingMixin):
         using_end_date = end_date
 
         # dates for dag runs
-        using_start_date = using_start_date or min([t.start_date for t in self.tasks])
+        using_start_date = using_start_date or min(t.start_date for t in self.tasks)
         using_end_date = using_end_date or timezone.utcnow()
 
         # next run date for a subdag isn't relevant (schedule_interval for subdags
@@ -1338,7 +1338,7 @@ class DAG(LoggingMixin):
         if count == 0:
             return 0
         if confirm_prompt:
-            ti_list = "\n".join([str(t) for t in tis])
+            ti_list = "\n".join(str(t) for t in tis)
             question = (
                 "You are about to delete these {count} tasks:\n{ti_list}\n\nAre you sure? (yes/no): "
             ).format(count=count, ti_list=ti_list)
@@ -1396,7 +1396,7 @@ class DAG(LoggingMixin):
             print("Nothing to clear.")
             return 0
         if confirm_prompt:
-            ti_list = "\n".join([str(t) for t in all_tis])
+            ti_list = "\n".join(str(t) for t in all_tis)
             question = f"You are about to delete these {count} tasks:\n{ti_list}\n\nAre you sure? (yes/no): "
             do_it = utils.helpers.ask_yesno(question)
 
diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py
index be2701b..ae21b90 100644
--- a/airflow/models/dagbag.py
+++ b/airflow/models/dagbag.py
@@ -516,7 +516,7 @@ class DagBag(LoggingMixin):
                         file=filepath.replace(settings.DAGS_FOLDER, ''),
                         duration=file_parse_end_dttm - file_parse_start_dttm,
                         dag_num=len(found_dags),
-                        task_num=sum([len(dag.tasks) for dag in found_dags]),
+                        task_num=sum(len(dag.tasks) for dag in found_dags),
                         dags=str([dag.dag_id for dag in found_dags]),
                     )
                 )
@@ -549,9 +549,9 @@ class DagBag(LoggingMixin):
         """Prints a report around DagBag loading stats"""
         stats = self.dagbag_stats
         dag_folder = self.dag_folder
-        duration = sum([o.duration for o in stats], timedelta()).total_seconds()
-        dag_num = sum([o.dag_num for o in stats])
-        task_num = sum([o.task_num for o in stats])
+        duration = sum((o.duration for o in stats), timedelta()).total_seconds()
+        dag_num = sum(o.dag_num for o in stats)
+        task_num = sum(o.task_num for o in stats)
         table = tabulate(stats, headers="keys")
 
         report = textwrap.dedent(
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index b77f0d7..2def707 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -1299,7 +1299,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
             airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True)
             self.log.info(
                 "Exporting the following env vars:\n%s",
-                '\n'.join([f"{k}={v}" for k, v in airflow_context_vars.items()]),
+                '\n'.join(f"{k}={v}" for k, v in airflow_context_vars.items()),
             )
 
             os.environ.update(airflow_context_vars)
diff --git a/airflow/operators/bash.py b/airflow/operators/bash.py
index 5f6a68d..1cc85e1 100644
--- a/airflow/operators/bash.py
+++ b/airflow/operators/bash.py
@@ -162,7 +162,7 @@ class BashOperator(BaseOperator):
         airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True)
         self.log.debug(
             'Exporting the following env vars:\n%s',
-            '\n'.join([f"{k}={v}" for k, v in airflow_context_vars.items()]),
+            '\n'.join(f"{k}={v}" for k, v in airflow_context_vars.items()),
         )
         env.update(airflow_context_vars)
         return env
diff --git a/airflow/operators/sql.py b/airflow/operators/sql.py
index 092adc7..769cf26 100644
--- a/airflow/operators/sql.py
+++ b/airflow/operators/sql.py
@@ -463,7 +463,7 @@ class SQLThresholdCheckOperator(BaseSQLOperator):
         Optional: Send data check info and metadata to an external database.
         Default functionality will log metadata.
         """
-        info = "\n".join([f"""{key}: {item}""" for key, item in meta_data.items()])
+        info = "\n".join(f"""{key}: {item}""" for key, item in meta_data.items())
         self.log.info("Log from %s:\n%s", self.dag_id, info)
 
 
diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
index a32de11..8584c1c 100644
--- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
+++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -121,7 +121,7 @@ class CloudwatchTaskHandler(FileTaskHandler, LoggingMixin):
                 )
             )
 
-            return '\n'.join([self._event_to_str(event) for event in events])
+            return '\n'.join(self._event_to_str(event) for event in events)
         except Exception:  # pylint: disable=broad-except
             msg = 'Could not read remote logs from log_group: {} log_stream: {}.'.format(
                 self.log_group, stream_name
diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py
index f60a91a..7c77ae5 100644
--- a/airflow/providers/apache/druid/transfers/hive_to_druid.py
+++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py
@@ -121,7 +121,7 @@ class HiveToDruidOperator(BaseOperator):
         self.log.info("Extracting data from Hive")
         hive_table = 'druid.' + context['task_instance_key_str'].replace('.', '_')
         sql = self.sql.strip().strip(';')
-        tblproperties = ''.join([f", '{k}' = '{v}'" for k, v in self.hive_tblproperties.items()])
+        tblproperties = ''.join(f", '{k}' = '{v}'" for k, v in self.hive_tblproperties.items())
         hql = f"""\
         SET mapred.output.compress=false;
         SET hive.exec.compress.output=false;
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index 3d8d713..e2f3030 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -429,16 +429,16 @@ class HiveCliHook(BaseHook):
         if create or recreate:
             if field_dict is None:
                 raise ValueError("Must provide a field dict when creating a table")
-            fields = ",\n    ".join([f"`{k.strip('`')}` {v}" for k, v in field_dict.items()])
+            fields = ",\n    ".join(f"`{k.strip('`')}` {v}" for k, v in field_dict.items())
             hql += f"CREATE TABLE IF NOT EXISTS {table} (\n{fields})\n"
             if partition:
-                pfields = ",\n    ".join([p + " STRING" for p in partition])
+                pfields = ",\n    ".join(p + " STRING" for p in partition)
                 hql += f"PARTITIONED BY ({pfields})\n"
             hql += "ROW FORMAT DELIMITED\n"
             hql += f"FIELDS TERMINATED BY '{delimiter}'\n"
             hql += "STORED AS textfile\n"
             if tblproperties is not None:
-                tprops = ", ".join([f"'{k}'='{v}'" for k, v in tblproperties.items()])
+                tprops = ", ".join(f"'{k}'='{v}'" for k, v in tblproperties.items())
                 hql += f"TBLPROPERTIES({tprops})\n"
             hql += ";"
             self.log.info(hql)
@@ -448,7 +448,7 @@ class HiveCliHook(BaseHook):
             hql += "OVERWRITE "
         hql += f"INTO TABLE {table} "
         if partition:
-            pvals = ", ".join([f"{k}='{v}'" for k, v in partition.items()])
+            pvals = ", ".join(f"{k}='{v}'" for k, v in partition.items())
             hql += f"PARTITION ({pvals})"
 
         # As a workaround for HIVE-10541, add a newline character
diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py
index 1c08ef1..eaff440 100644
--- a/airflow/providers/apache/hive/operators/hive_stats.py
+++ b/airflow/providers/apache/hive/operators/hive_stats.py
@@ -132,7 +132,7 @@ class HiveStatsCollectionOperator(BaseOperator):
             exprs.update(assign_exprs)
         exprs.update(self.extra_exprs)
         exprs = OrderedDict(exprs)
-        exprs_str = ",\n        ".join([v + " AS " + k[0] + '__' + k[1] for k, v in exprs.items()])
+        exprs_str = ",\n        ".join(v + " AS " + k[0] + '__' + k[1] for k, v in exprs.items())
 
         where_clause_ = [f"{k} = '{v}'" for k, v in self.partition.items()]
         where_clause = " AND\n        ".join(where_clause_)
diff --git a/airflow/providers/cncf/kubernetes/backcompat/volume.py b/airflow/providers/cncf/kubernetes/backcompat/volume.py
index bd6a128..e5b4d00 100644
--- a/airflow/providers/cncf/kubernetes/backcompat/volume.py
+++ b/airflow/providers/cncf/kubernetes/backcompat/volume.py
@@ -61,4 +61,4 @@ class Volume:
     # source: https://www.geeksforgeeks.org/python-program-to-convert-camel-case-string-to-snake-case/
     @staticmethod
     def _convert_to_snake_case(input_string):
-        return ''.join(['_' + i.lower() if i.isupper() else i for i in input_string]).lstrip('_')
+        return ''.join('_' + i.lower() if i.isupper() else i for i in input_string).lstrip('_')
diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
index d6b2eb2..32cc9c9 100644
--- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
+++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py
@@ -407,7 +407,7 @@ class KubernetesPodOperator(BaseOperator):  # pylint: disable=too-many-instance-
     @staticmethod
     def _get_pod_identifying_label_string(labels) -> str:
         filtered_labels = {label_id: label for label_id, label in labels.items() if label_id != 'try_number'}
-        return ','.join([label_id + '=' + label for label_id, label in sorted(filtered_labels.items())])
+        return ','.join(label_id + '=' + label for label_id, label in sorted(filtered_labels.items()))
 
     @staticmethod
     def _try_numbers_match(context, pod) -> bool:
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py
index 16e4d65..ae08ecd 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -198,7 +198,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
         # to prevent it from showing in the UI.
         def concat_logs(lines):
             log_range = (len(lines) - 1) if lines[-1].message == self.end_of_log_mark.strip() else len(lines)
-            return '\n'.join([self._format_msg(lines[i]) for i in range(log_range)])
+            return '\n'.join(self._format_msg(lines[i]) for i in range(log_range))
 
         message = [(host, concat_logs(hosted_log)) for host, hosted_log in logs_by_host]
 
diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py
index 7c53507..ebdbfa3 100644
--- a/airflow/providers/google/cloud/hooks/dataflow.py
+++ b/airflow/providers/google/cloud/hooks/dataflow.py
@@ -1006,7 +1006,7 @@ class DataflowHook(GoogleBaseHook):
             f"--region={location}",
             *(beam_options_to_args(options)),
         ]
-        self.log.info("Executing command: %s", " ".join([shlex.quote(c) for c in cmd]))
+        self.log.info("Executing command: %s", " ".join(shlex.quote(c) for c in cmd))
         with self.provide_authorized_gcloud():
             proc = subprocess.run(  # pylint: disable=subprocess-run-check
                 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
diff --git a/airflow/providers/google/cloud/hooks/gdm.py b/airflow/providers/google/cloud/hooks/gdm.py
index f11a390..6ff7338 100644
--- a/airflow/providers/google/cloud/hooks/gdm.py
+++ b/airflow/providers/google/cloud/hooks/gdm.py
@@ -108,5 +108,5 @@ class GoogleDeploymentManagerHook(GoogleBaseHook):  # pylint: disable=abstract-m
         resp = request.execute()
         if 'error' in resp.keys():
             raise AirflowException(
-                'Errors deleting deployment: ', ', '.join([err['message'] for err in resp['error']['errors']])
+                'Errors deleting deployment: ', ', '.join(err['message'] for err in resp['error']['errors'])
             )
diff --git a/airflow/providers/microsoft/azure/hooks/azure_batch.py b/airflow/providers/microsoft/azure/hooks/azure_batch.py
index 153a2f7..f9f0db0 100644
--- a/airflow/providers/microsoft/azure/hooks/azure_batch.py
+++ b/airflow/providers/microsoft/azure/hooks/azure_batch.py
@@ -286,7 +286,7 @@ class AzureBatchHook(BaseHook):
             # refresh pool to ensure that there is no resize error
             pool = self.connection.pool.get(pool_id)
             if pool.resize_errors is not None:
-                resize_errors = "\n".join([repr(e) for e in pool.resize_errors])
+                resize_errors = "\n".join(repr(e) for e in pool.resize_errors)
                 raise RuntimeError(f'resize error encountered for pool {pool.id}:\n{resize_errors}')
             nodes = list(self.connection.compute_node.list(pool.id))
             if len(nodes) >= pool.target_dedicated_nodes and all(node.state in node_state for node in nodes):
diff --git a/airflow/stats.py b/airflow/stats.py
index 34677da..ad4d241 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -244,7 +244,7 @@ class AllowListValidator:
     def __init__(self, allow_list=None):
         if allow_list:
             # pylint: disable=consider-using-generator
-            self.allow_list = tuple([item.strip().lower() for item in allow_list.split(',')])
+            self.allow_list = tuple(item.strip().lower() for item in allow_list.split(','))
         else:
             self.allow_list = None
 
diff --git a/airflow/utils/process_utils.py b/airflow/utils/process_utils.py
index b76ca7d..1fbaccd 100644
--- a/airflow/utils/process_utils.py
+++ b/airflow/utils/process_utils.py
@@ -132,7 +132,7 @@ def execute_in_subprocess(cmd: List[str]):
     :param cmd: command and arguments to run
     :type cmd: List[str]
     """
-    log.info("Executing cmd: %s", " ".join([shlex.quote(c) for c in cmd]))
+    log.info("Executing cmd: %s", " ".join(shlex.quote(c) for c in cmd))
     with subprocess.Popen(
         cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=0, close_fds=True
     ) as proc:
@@ -153,7 +153,7 @@ def execute_interactive(cmd: List[str], **kwargs):
     state after the process is completed e.g. if the subprocess hides the cursor, it will be restored after
     the process is completed.
     """
-    log.info("Executing cmd: %s", " ".join([shlex.quote(c) for c in cmd]))
+    log.info("Executing cmd: %s", " ".join(shlex.quote(c) for c in cmd))
 
     old_tty = termios.tcgetattr(sys.stdin)
     tty.setraw(sys.stdin.fileno())
diff --git a/airflow/utils/strings.py b/airflow/utils/strings.py
index 8ae735c..8d73914 100644
--- a/airflow/utils/strings.py
+++ b/airflow/utils/strings.py
@@ -22,7 +22,7 @@ from random import choice
 
 def get_random_string(length=8, choices=string.ascii_letters + string.digits):
     """Generate random string"""
-    return ''.join([choice(choices) for _ in range(length)])
+    return ''.join(choice(choices) for _ in range(length))
 
 
 def to_boolean(astring):
diff --git a/airflow/utils/task_group.py b/airflow/utils/task_group.py
index 551eb48..1a2cc7c 100644
--- a/airflow/utils/task_group.py
+++ b/airflow/utils/task_group.py
@@ -106,11 +106,9 @@ class TaskGroup(TaskMixin):
                 raise DuplicateTaskIdFound(f"group_id '{self.group_id}' has already been added to the DAG")
             base = re.split(r'__\d+$', group_id)[0]
             suffixes = sorted(
-                [
-                    int(re.split(r'^.+__', used_group_id)[1])
-                    for used_group_id in self.used_group_ids
-                    if used_group_id is not None and re.match(rf'^{base}__\d+$', used_group_id)
-                ]
+                int(re.split(r'^.+__', used_group_id)[1])
+                for used_group_id in self.used_group_ids
+                if used_group_id is not None and re.match(rf'^{base}__\d+$', used_group_id)
             )
             if not suffixes:
                 self._group_id += '__1'
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 8f4c23a..63f232a 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -1425,7 +1425,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
         )
         failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
         if failed_deps:
-            failed_deps_str = ", ".join([f"{dep.dep_name}: {dep.reason}" for dep in failed_deps])
+            failed_deps_str = ", ".join(f"{dep.dep_name}: {dep.reason}" for dep in failed_deps)
             flash(
                 "Could not queue task instance for execution, dependencies not met: "
                 "{}".format(failed_deps_str),
@@ -1579,7 +1579,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
             flash("No task instances to clear", 'error')
             response = redirect(origin)
         else:
-            details = "\n".join([str(t) for t in tis])
+            details = "\n".join(str(t) for t in tis)
 
             response = self.render_template(
                 'airflow/confirm.html',
@@ -1722,7 +1722,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
             return redirect(origin)
 
         else:
-            details = '\n'.join([str(t) for t in new_dag_state])
+            details = '\n'.join(str(t) for t in new_dag_state)
 
             response = self.render_template(
                 'airflow/confirm.html',
@@ -1751,7 +1751,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
             return redirect(origin)
 
         else:
-            details = '\n'.join([str(t) for t in new_dag_state])
+            details = '\n'.join(str(t) for t in new_dag_state)
 
             response = self.render_template(
                 'airflow/confirm.html',
@@ -1845,7 +1845,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
             commit=False,
         )
 
-        details = "\n".join([str(t) for t in to_be_altered])
+        details = "\n".join(str(t) for t in to_be_altered)
 
         response = self.render_template(
             "airflow/confirm.html",
@@ -2344,7 +2344,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
             )
 
         dates = sorted({ti.execution_date for ti in task_instances})
-        max_date = max([ti.execution_date for ti in task_instances]) if dates else None
+        max_date = max(ti.execution_date for ti in task_instances) if dates else None
 
         session.commit()
 
@@ -2423,7 +2423,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
 
         tis = dag.get_task_instances(start_date=min_date, end_date=base_date)
         tries = sorted({ti.try_number for ti in tis})
-        max_date = max([ti.execution_date for ti in tis]) if tries else None
+        max_date = max(ti.execution_date for ti in tis) if tries else None
         chart.create_y_axis('yAxis', format='.02f', custom_format=False, label='Tries')
         chart.axislist['yAxis']['axisLabelDistance'] = '-15'
 
@@ -2512,7 +2512,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
 
         tis = dag.get_task_instances(start_date=min_date, end_date=base_date)
         dates = sorted({ti.execution_date for ti in tis})
-        max_date = max([ti.execution_date for ti in tis]) if dates else None
+        max_date = max(ti.execution_date for ti in tis) if dates else None
 
         session.commit()
 
@@ -2617,7 +2617,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
         tis = sorted(tis, key=lambda ti: ti.start_date)
         ti_fails = list(
             itertools.chain(
-                *[
+                *(
                     (
                         session.query(TaskFail)
                         .filter(
@@ -2628,7 +2628,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
                         .all()
                     )
                     for ti in tis
-                ]
+                )
             )
         )
 
diff --git a/dev/check_files.py b/dev/check_files.py
index 26f825f..9117aad 100644
--- a/dev/check_files.py
+++ b/dev/check_files.py
@@ -189,7 +189,7 @@ def main(check_type: str, path: str, version: str):
     if check_type.upper() == PROVIDERS:
         files = os.listdir(os.path.join(path, "providers"))
         pips = check_providers(files, version)
-        create_docker(PROVIDERS_DOCKER.format("\n".join([f"RUN pip install '{p}'" for p in pips])))
+        create_docker(PROVIDERS_DOCKER.format("\n".join(f"RUN pip install '{p}'" for p in pips)))
         return
 
     if check_type.upper() == AIRFLOW:
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 87aab2d..c27c88b 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -2026,7 +2026,7 @@ def verify_provider_classes_for_single_provider(imported_classes: List[str], pro
     full_package_name = f"airflow.providers.{provider_package_id}"
     entity_summaries = get_package_class_summary(full_package_name, imported_classes)
     total, bad = check_if_classes_are_properly_named(entity_summaries)
-    bad += sum([len(entity_summary.wrong_entities) for entity_summary in entity_summaries.values()])
+    bad += sum(len(entity_summary.wrong_entities) for entity_summary in entity_summaries.values())
     if bad != 0:
         print()
         print(f"[red]There are {bad} errors of {total} entities for {provider_package_id}[/]")
diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py
index 55ae490..16370f7 100644
--- a/docs/exts/docs_build/docs_builder.py
+++ b/docs/exts/docs_build/docs_builder.py
@@ -160,7 +160,7 @@ class AirflowDocsBuilder:
         if verbose:
             console.print(
                 f"[blue]{self.package_name:60}:[/] Executing cmd: ",
-                " ".join([shlex.quote(c) for c in build_cmd]),
+                " ".join(shlex.quote(c) for c in build_cmd),
             )
             console.print(f"[blue]{self.package_name:60}:[/] The output is hidden until an error occurs.")
         with open(self.log_spelling_filename, "wt") as output:
@@ -235,7 +235,7 @@ class AirflowDocsBuilder:
         if verbose:
             console.print(
                 f"[blue]{self.package_name:60}:[/] Executing cmd: ",
-                " ".join([shlex.quote(c) for c in build_cmd]),
+                " ".join(shlex.quote(c) for c in build_cmd),
             )
         else:
             console.print(
diff --git a/metastore_browser/hive_metastore.py b/metastore_browser/hive_metastore.py
index 462f245..3400899 100644
--- a/metastore_browser/hive_metastore.py
+++ b/metastore_browser/hive_metastore.py
@@ -126,10 +126,10 @@ class MetastoreBrowserView(BaseView):
         """Retrieve objects from TBLS and DBS"""
         where_clause = ''
         if DB_ALLOW_LIST:
-            dbs = ",".join(["'" + db + "'" for db in DB_ALLOW_LIST])
+            dbs = ",".join("'" + db + "'" for db in DB_ALLOW_LIST)
             where_clause = f"AND b.name IN ({dbs})"
         if DB_DENY_LIST:
-            dbs = ",".join(["'" + db + "'" for db in DB_DENY_LIST])
+            dbs = ",".join("'" + db + "'" for db in DB_DENY_LIST)
             where_clause = f"AND b.name NOT IN ({dbs})"
         sql = f"""
         SELECT CONCAT(b.NAME, '.', a.TBL_NAME), TBL_TYPE
diff --git a/scripts/in_container/update_quarantined_test_status.py b/scripts/in_container/update_quarantined_test_status.py
index 06b7831..825a2d8 100755
--- a/scripts/in_container/update_quarantined_test_status.py
+++ b/scripts/in_container/update_quarantined_test_status.py
@@ -165,7 +165,7 @@ def get_table(history_map: Dict[str, TestHistory]) -> str:
             [
                 history.url,
                 "Succeeded" if history.states[0] else "Failed",
-                " ".join([reverse_status_map[state] for state in history.states]),
+                " ".join(reverse_status_map[state] for state in history.states),
                 get_history_status(history),
                 history.comment,
             ]
diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py
index 560f379..d4d8645 100644
--- a/tests/always/test_project_structure.py
+++ b/tests/always/test_project_structure.py
@@ -225,7 +225,7 @@ class TestGoogleProviderProjectStructure(unittest.TestCase):
 
     def test_example_dags(self):
         operators_modules = itertools.chain(
-            *[self.find_resource_files(resource_type=d) for d in ["operators", "sensors", "transfers"]]
+            *(self.find_resource_files(resource_type=d) for d in ["operators", "sensors", "transfers"])
         )
         example_dags_files = self.find_resource_files(resource_type="example_dags")
         # Generate tuple of department and service e.g. ('marketing_platform', 'display_video')
@@ -337,11 +337,11 @@ class TestOperatorsHooks(unittest.TestCase):
     def test_no_illegal_suffixes(self):
         illegal_suffixes = ["_operator.py", "_hook.py", "_sensor.py"]
         files = itertools.chain(
-            *[
+            *(
                 glob.glob(f"{ROOT_FOLDER}/{part}/providers/**/{resource_type}/*.py", recursive=True)
                 for resource_type in ["operators", "hooks", "sensors", "example_dags"]
                 for part in ["airflow", "tests"]
-            ]
+            )
         )
 
         invalid_files = [f for f in files if any(f.endswith(suffix) for suffix in illegal_suffixes)]
diff --git a/tests/core/test_core.py b/tests/core/test_core.py
index 78f2676..ee2df3c 100644
--- a/tests/core/test_core.py
+++ b/tests/core/test_core.py
@@ -386,7 +386,7 @@ class TestCore(unittest.TestCase):
 
         assert 0 == len(op1_fails)
         assert 1 == len(op2_fails)
-        assert sum([f.duration for f in op2_fails]) >= 3
+        assert sum(f.duration for f in op2_fails) >= 3
 
     def test_externally_triggered_dagrun(self):
         TI = TaskInstance
diff --git a/tests/hooks/test_subprocess.py b/tests/hooks/test_subprocess.py
index 6d12f84..336ef91 100644
--- a/tests/hooks/test_subprocess.py
+++ b/tests/hooks/test_subprocess.py
@@ -52,7 +52,7 @@ class TestSubprocessHook(unittest.TestCase):
             Will always echo the special test var named ``OS_ENV_KEY`` into the file to test whether
             ``os.environ`` is passed or not.
             """
-            return '\n'.join([f"echo {k}=${k}>> {filename}" for k in [*keys, OS_ENV_KEY]])
+            return '\n'.join(f"echo {k}=${k}>> {filename}" for k in [*keys, OS_ENV_KEY])
 
         with TemporaryDirectory() as tmp_dir, mock.patch.dict('os.environ', {OS_ENV_KEY: OS_ENV_VAL}):
             tmp_file = Path(tmp_dir, 'test.txt')
diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py
index a243f1a..7b772df 100644
--- a/tests/jobs/test_backfill_job.py
+++ b/tests/jobs/test_backfill_job.py
@@ -767,7 +767,7 @@ class TestBackfillJob(unittest.TestCase):
         # test executor history keeps a list
         history = executor.history
 
-        assert [sorted([item[-1].key[1:3] for item in batch]) for batch in history] == [
+        assert [sorted(item[-1].key[1:3] for item in batch) for batch in history] == [
             [
                 ('leave1', date0),
                 ('leave1', date1),
diff --git a/tests/providers/amazon/aws/operators/test_s3_delete_objects.py b/tests/providers/amazon/aws/operators/test_s3_delete_objects.py
index d134da4..e9e93d8 100644
--- a/tests/providers/amazon/aws/operators/test_s3_delete_objects.py
+++ b/tests/providers/amazon/aws/operators/test_s3_delete_objects.py
@@ -61,7 +61,7 @@ class TestS3DeleteObjectsOperator(unittest.TestCase):
         # The objects should be detected before the DELETE action is taken
         objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=key_pattern)
         assert len(objects_in_dest_bucket['Contents']) == n_keys
-        assert sorted([x['Key'] for x in objects_in_dest_bucket['Contents']]) == sorted(keys)
+        assert sorted(x['Key'] for x in objects_in_dest_bucket['Contents']) == sorted(keys)
 
         op = S3DeleteObjectsOperator(task_id="test_task_s3_delete_multiple_objects", bucket=bucket, keys=keys)
         op.execute(None)
@@ -84,7 +84,7 @@ class TestS3DeleteObjectsOperator(unittest.TestCase):
         # The objects should be detected before the DELETE action is taken
         objects_in_dest_bucket = conn.list_objects(Bucket=bucket, Prefix=key_pattern)
         assert len(objects_in_dest_bucket['Contents']) == n_keys
-        assert sorted([x['Key'] for x in objects_in_dest_bucket['Contents']]) == sorted(keys)
+        assert sorted(x['Key'] for x in objects_in_dest_bucket['Contents']) == sorted(keys)
 
         op = S3DeleteObjectsOperator(task_id="test_task_s3_delete_prefix", bucket=bucket, prefix=key_pattern)
         op.execute(None)
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 1f5400a..179dbcc 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -229,7 +229,7 @@ class TestHiveCliHook(unittest.TestCase):
         filepath = "/path/to/input/file"
         table = "output_table"
         field_dict = OrderedDict([("name", "string"), ("gender", "string")])
-        fields = ",\n    ".join([f"`{k.strip('`')}` {v}" for k, v in field_dict.items()])
+        fields = ",\n    ".join(f"`{k.strip('`')}` {v}" for k, v in field_dict.items())
 
         hook = MockHiveCliHook()
         hook.load_file(filepath=filepath, table=table, field_dict=field_dict, create=True, recreate=True)
diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py
index 03d5ce3..0314e5c 100644
--- a/tests/providers/google/cloud/hooks/test_dataflow.py
+++ b/tests/providers/google/cloud/hooks/test_dataflow.py
@@ -1801,7 +1801,7 @@ class TestDataflow(unittest.TestCase):
         name_func=lambda func, num, p: f"{func.__name__}_{num}",
     )
     def test_data_flow_valid_job_id(self, log):
-        echos = ";".join([f"echo {shlex.quote(line)}" for line in log.split("\n")])
+        echos = ";".join(f"echo {shlex.quote(line)}" for line in log.split("\n"))
         cmd = ["bash", "-c", echos]
         found_job_id = None
 
diff --git a/tests/providers/google/cloud/operators/test_dataflow_system.py b/tests/providers/google/cloud/operators/test_dataflow_system.py
index 0157b82..067ecf1 100644
--- a/tests/providers/google/cloud/operators/test_dataflow_system.py
+++ b/tests/providers/google/cloud/operators/test_dataflow_system.py
@@ -120,8 +120,7 @@ class CloudDataflowExampleDagFlexTemplateJavagSystemTest(GoogleSystemTest):
                     "gcloud",
                     "builds",
                     "submit",
-                    "--substitutions="
-                    + ",".join([f"{k}={shlex.quote(v)}" for k, v in substitutions.items()]),
+                    "--substitutions=" + ",".join(f"{k}={shlex.quote(v)}" for k, v in substitutions.items()),
                     f"--config={f.name}",
                     "--no-source",
                 ]
diff --git a/tests/test_utils/logging_command_executor.py b/tests/test_utils/logging_command_executor.py
index 6a79687..da4f01e 100644
--- a/tests/test_utils/logging_command_executor.py
+++ b/tests/test_utils/logging_command_executor.py
@@ -26,11 +26,11 @@ from airflow.utils.log.logging_mixin import LoggingMixin
 class LoggingCommandExecutor(LoggingMixin):
     def execute_cmd(self, cmd, silent=False, cwd=None, env=None):
         if silent:
-            self.log.info("Executing in silent mode: '%s'", " ".join([shlex.quote(c) for c in cmd]))
+            self.log.info("Executing in silent mode: '%s'", " ".join(shlex.quote(c) for c in cmd))
             with open(os.devnull, 'w') as dev_null:
                 return subprocess.call(args=cmd, stdout=dev_null, stderr=subprocess.STDOUT, env=env, cwd=cwd)
         else:
-            self.log.info("Executing: '%s'", " ".join([shlex.quote(c) for c in cmd]))
+            self.log.info("Executing: '%s'", " ".join(shlex.quote(c) for c in cmd))
             with subprocess.Popen(
                 args=cmd,
                 stdout=subprocess.PIPE,
@@ -44,16 +44,16 @@ class LoggingCommandExecutor(LoggingMixin):
                 self.log.info("Stdout: %s", output)
                 self.log.info("Stderr: %s", err)
                 if retcode:
-                    self.log.error("Error when executing %s", " ".join([shlex.quote(c) for c in cmd]))
+                    self.log.error("Error when executing %s", " ".join(shlex.quote(c) for c in cmd))
                 return retcode
 
     def check_output(self, cmd):
-        self.log.info("Executing for output: '%s'", " ".join([shlex.quote(c) for c in cmd]))
+        self.log.info("Executing for output: '%s'", " ".join(shlex.quote(c) for c in cmd))
         with subprocess.Popen(args=cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process:
             output, err = process.communicate()
             retcode = process.poll()
             if retcode:
-                self.log.error("Error when executing '%s'", " ".join([shlex.quote(c) for c in cmd]))
+                self.log.error("Error when executing '%s'", " ".join(shlex.quote(c) for c in cmd))
                 self.log.info("Stdout: %s", output)
                 self.log.info("Stderr: %s", err)
                 raise AirflowException(
diff --git a/tests/test_utils/perf/dags/elastic_dag.py b/tests/test_utils/perf/dags/elastic_dag.py
index 51db138..5e9a494 100644
--- a/tests/test_utils/perf/dags/elastic_dag.py
+++ b/tests/test_utils/perf/dags/elastic_dag.py
@@ -105,7 +105,7 @@ def chain_as_grid(*tasks: BashOperator):
     """
     if len(tasks) > 100 * 99 / 2:
         raise ValueError('Cannot generate grid DAGs with lateral size larger than 100 tasks.')
-    grid_size = min([n for n in range(100) if n * (n + 1) / 2 >= len(tasks)])
+    grid_size = min(n for n in range(100) if n * (n + 1) / 2 >= len(tasks))
 
     def index(i, j):
         """
diff --git a/tests/test_utils/perf/scheduler_ops_metrics.py b/tests/test_utils/perf/scheduler_ops_metrics.py
index a0ffd42..040515a 100644
--- a/tests/test_utils/perf/scheduler_ops_metrics.py
+++ b/tests/test_utils/perf/scheduler_ops_metrics.py
@@ -136,7 +136,7 @@ class SchedulerMetricsJob(SchedulerJob):
         dags = [dagbag.dags[dag_id] for dag_id in DAG_IDS]
         # the tasks in perf_dag_1 and per_dag_2 have a daily schedule interval.
         num_task_instances = sum(
-            [(timezone.utcnow() - task.start_date).days for dag in dags for task in dag.tasks]
+            (timezone.utcnow() - task.start_date).days for dag in dags for task in dag.tasks
         )
 
         if (