Posted to commits@airflow.apache.org by po...@apache.org on 2023/08/24 09:43:50 UTC

[airflow] branch main updated: Do not create lists we don't need (#33519)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 4154cc04ce Do not create lists we don't need (#33519)
4154cc04ce is described below

commit 4154cc04ce9702b09e6f13d423686fdf4cb7b877
Author: Miroslav Šedivý <67...@users.noreply.github.com>
AuthorDate: Thu Aug 24 09:43:40 2023 +0000

    Do not create lists we don't need (#33519)
---
 airflow/cli/commands/webserver_command.py          |  4 +--
 .../providers/databricks/hooks/databricks_sql.py   | 10 +++---
 .../providers/google/cloud/operators/datafusion.py |  2 +-
 .../commands/production_image_commands.py          | 36 ++++++++++------------
 .../ci/pre_commit/pre_commit_unittest_testcase.py  | 12 ++++----
 tests/core/test_configuration.py                   |  4 +--
 tests/decorators/test_setup_teardown.py            |  2 +-
 7 files changed, 34 insertions(+), 36 deletions(-)

diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py
index 1714e2bd55..2aa686152c 100644
--- a/airflow/cli/commands/webserver_command.py
+++ b/airflow/cli/commands/webserver_command.py
@@ -144,8 +144,8 @@ class GunicornMonitor(LoggingMixin):
                 pass
             return False
 
-        ready_workers = [proc for proc in workers if ready_prefix_on_cmdline(proc)]
-        return len(ready_workers)
+        nb_ready_workers = sum(1 for proc in workers if ready_prefix_on_cmdline(proc))
+        return nb_ready_workers
 
     def _get_num_workers_running(self) -> int:
         """Return number of running Gunicorn workers processes."""
diff --git a/airflow/providers/databricks/hooks/databricks_sql.py b/airflow/providers/databricks/hooks/databricks_sql.py
index 90b17d8ab0..362f1ab43b 100644
--- a/airflow/providers/databricks/hooks/databricks_sql.py
+++ b/airflow/providers/databricks/hooks/databricks_sql.py
@@ -91,10 +91,12 @@ class DatabricksSqlHook(BaseDatabricksHook, DbApiHook):
         result = self._do_api_call(LIST_SQL_ENDPOINTS_ENDPOINT)
         if "endpoints" not in result:
             raise AirflowException("Can't list Databricks SQL endpoints")
-        lst = [endpoint for endpoint in result["endpoints"] if endpoint["name"] == endpoint_name]
-        if not lst:
-            raise AirflowException(f"Can't f Databricks SQL endpoint with name '{endpoint_name}'")
-        return lst[0]
+        try:
+            endpoint = next(endpoint for endpoint in result["endpoints"] if endpoint["name"] == endpoint_name)
+        except StopIteration:
+            raise AirflowException(f"Can't find Databricks SQL endpoint with name '{endpoint_name}'")
+        else:
+            return endpoint
 
     def get_conn(self) -> Connection:
         """Returns a Databricks SQL connection object."""
diff --git a/airflow/providers/google/cloud/operators/datafusion.py b/airflow/providers/google/cloud/operators/datafusion.py
index e43420d3be..5dce5ba290 100644
--- a/airflow/providers/google/cloud/operators/datafusion.py
+++ b/airflow/providers/google/cloud/operators/datafusion.py
@@ -44,7 +44,7 @@ class DataFusionPipelineLinkHelper:
     @staticmethod
     def get_project_id(instance):
         instance = instance["name"]
-        project_id = [x for x in instance.split("/") if x.startswith("airflow")][0]
+        project_id = next(x for x in instance.split("/") if x.startswith("airflow"))
         return project_id
 
 
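The same idiom appears here, with one behavioral shift worth noting: the old [...][0] raised IndexError when nothing matched, while a bare next() raises StopIteration. A sketch of the two-argument form that sidesteps the exception entirely (the instance name below is made up):

    instance_name = "projects/airflow-project/locations/us/instances/demo"

    # next() with a default returns None instead of raising StopIteration
    # when no path segment matches.
    project_id = next(
        (part for part in instance_name.split("/") if part.startswith("airflow")),
        None,
    )
    print(project_id)  # airflow-project
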
diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
index b9dc45d681..ba13509b58 100644
--- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
@@ -435,26 +435,22 @@ def check_docker_context_files(install_packages_from_context: bool):
 
     :param install_packages_from_context: whether we want to install from docker-context-files
     """
-    context_file = DOCKER_CONTEXT_DIR.glob("**/*")
-    number_of_context_files = len(
-        [context for context in context_file if context.is_file() and context.name != ".README.md"]
-    )
-    if number_of_context_files == 0:
-        if install_packages_from_context:
-            get_console().print("[warning]\nERROR! You want to install packages from docker-context-files")
-            get_console().print("[warning]\n but there are no packages to install in this folder.")
-            sys.exit(1)
-    else:
-        if not install_packages_from_context:
-            get_console().print(
-                "[warning]\n ERROR! There are some extra files in docker-context-files except README.md"
-            )
-            get_console().print("[warning]\nAnd you did not choose --install-packages-from-context flag")
-            get_console().print(
-                "[warning]\nThis might result in unnecessary cache invalidation and long build times"
-            )
-            get_console().print("[warning]Please restart the command with --cleanup-context switch\n")
-            sys.exit(1)
+    context_file = DOCKER_CONTEXT_DIR.rglob("*")
+    any_context_files = any(context.is_file() and context.name != ".README.md" for context in context_file)
+    if not any_context_files and install_packages_from_context:
+        get_console().print("[warning]\nERROR! You want to install packages from docker-context-files")
+        get_console().print("[warning]\n but there are no packages to install in this folder.")
+        sys.exit(1)
+    elif any_context_files and not install_packages_from_context:
+        get_console().print(
+            "[warning]\n ERROR! There are some extra files in docker-context-files except README.md"
+        )
+        get_console().print("[warning]\nAnd you did not choose --install-packages-from-context flag")
+        get_console().print(
+            "[warning]\nThis might result in unnecessary cache invalidation and long build times"
+        )
+        get_console().print("[warning]Please restart the command with --cleanup-context switch\n")
+        sys.exit(1)
 
 
 def run_build_production_image(
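
This hunk turns a len()-of-list emptiness test into any() over a lazy rglob() walk, so the scan stops at the first qualifying file, and the nested if/else flattens into two guarded branches. A self-contained sketch of the short-circuit check (the directory path is illustrative):

    from pathlib import Path

    context_dir = Path("docker-context-files")  # illustrative path

    # rglob("*") is spelled the same as glob("**/*") and walks lazily;
    # any() short-circuits at the first file that is not .README.md,
    # so the rest of the tree is never visited.
    has_packages = any(
        p.is_file() and p.name != ".README.md"
        for p in context_dir.rglob("*")
    )
    print(has_packages)
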
diff --git a/scripts/ci/pre_commit/pre_commit_unittest_testcase.py b/scripts/ci/pre_commit/pre_commit_unittest_testcase.py
index c13c9a9925..3ed21c8edc 100755
--- a/scripts/ci/pre_commit/pre_commit_unittest_testcase.py
+++ b/scripts/ci/pre_commit/pre_commit_unittest_testcase.py
@@ -29,18 +29,18 @@ def check_test_file(file: str) -> int:
     classes = [c for c in node.body if isinstance(c, ast.ClassDef)]
     for c in classes:
         # Some classes are returned as an ast.Attribute, some as an ast.Name object. Not quite sure why
-        parent_classes = [base.attr for base in c.bases if isinstance(base, ast.Attribute)]
-        parent_classes.extend([base.id for base in c.bases if isinstance(base, ast.Name)])
-
-        if "TestCase" in parent_classes:
+        if any(
+            (isinstance(base, ast.Attribute) and base.attr == "TestCase")
+            or (isinstance(base, ast.Name) and base.id == "TestCase")
+            for base in c.bases
+        ):
             found += 1
             print(f"The class {c.name} inherits from TestCase, please use pytest instead")
-
     return found
 
 
 def main(*args: str) -> int:
-    return sum([check_test_file(file) for file in args[1:]])
+    return sum(check_test_file(file) for file in args[1:])
 
 
 if __name__ == "__main__":
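
The pre-commit hook folds two list comprehensions into a single any() covering both AST shapes a base class can take, and main() drops the brackets so sum() consumes a generator rather than a list. A runnable sketch of the detection logic on a made-up sample module:

    import ast
    import textwrap

    # Made-up sample: one class inherits unittest.TestCase, one does not.
    source = textwrap.dedent(
        """
        import unittest

        class TestOld(unittest.TestCase):
            pass

        class TestNew:
            pass
        """
    )

    for c in (n for n in ast.parse(source).body if isinstance(n, ast.ClassDef)):
        # `unittest.TestCase` parses as ast.Attribute with attr="TestCase";
        # a bare `TestCase` (from `from unittest import TestCase`) parses
        # as ast.Name with id="TestCase".
        if any(
            (isinstance(base, ast.Attribute) and base.attr == "TestCase")
            or (isinstance(base, ast.Name) and base.id == "TestCase")
            for base in c.bases
        ):
            print(f"{c.name} inherits from TestCase")  # prints: TestOld ...
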
diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py
index a7caba2f70..eb66c255b8 100644
--- a/tests/core/test_configuration.py
+++ b/tests/core/test_configuration.py
@@ -1565,7 +1565,7 @@ sql_alchemy_conn=sqlite://test
         all_sections_including_defaults = airflow_cfg.get_sections_including_defaults()
         assert "core" in all_sections_including_defaults
         assert "test-section" in all_sections_including_defaults
-        assert len([section for section in all_sections_including_defaults if section == "core"]) == 1
+        assert sum(1 for section in all_sections_including_defaults if section == "core") == 1
 
     def test_get_options_including_defaults(self):
         airflow_cfg = AirflowConfigParser()
@@ -1589,7 +1589,7 @@ sql_alchemy_conn=sqlite://test
         assert "dags_folder" in all_core_options_including_defaults
         assert "test-value" == airflow_cfg.get("core", "new-test-key")
         assert "test-runner" == airflow_cfg.get("core", "task_runner")
-        assert len([option for option in all_core_options_including_defaults if option == "task_runner"]) == 1
+        assert sum(1 for option in all_core_options_including_defaults if option == "task_runner") == 1
 
 
 def test_sensitive_values():
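
These test assertions use the same counting idiom to check that a name occurs exactly once. On a concrete list the built-in count() is an equivalent spelling; the generator form matters when the source is a lazy iterable (the sample data is illustrative):

    sections = ["core", "test-section", "webserver"]

    # Both assert that "core" appears exactly once.
    assert sum(1 for s in sections if s == "core") == 1
    assert sections.count("core") == 1
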
diff --git a/tests/decorators/test_setup_teardown.py b/tests/decorators/test_setup_teardown.py
index 567ac5c9ab..4ec8376a07 100644
--- a/tests/decorators/test_setup_teardown.py
+++ b/tests/decorators/test_setup_teardown.py
@@ -255,7 +255,7 @@ class TestSetupTearDownTask:
             mytask2()
 
         assert len(dag.task_group.children) == 6
-        assert [x for x in dag.tasks if not x.downstream_list]  # no deps have been set
+        assert sum(1 for x in dag.tasks if not x.downstream_list) == 6
         assert dag.task_group.children["setuptask"].is_setup
         assert dag.task_group.children["teardowntask"].is_teardown
         assert dag.task_group.children["setuptask2"].is_setup