Posted to commits@airflow.apache.org by ds...@apache.org on 2023/06/16 18:30:24 UTC

[airflow] branch main updated: Upgrade ruff to 0.272 (#31966)

This is an automated email from the ASF dual-hosted git repository.

dstandish pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new fc0e5a4d42 Upgrade ruff to 0.272 (#31966)
fc0e5a4d42 is described below

commit fc0e5a4d42ee882ca5bc20ea65be38b2c739644d
Author: Daniel Standish <15...@users.noreply.github.com>
AuthorDate: Fri Jun 16 11:30:18 2023 -0700

    Upgrade ruff to 0.272 (#31966)
---
 .pre-commit-config.yaml                          |  4 ++--
 airflow/example_dags/example_sensor_decorator.py |  1 -
 airflow/example_dags/tutorial_taskflow_api.py    |  1 -
 airflow/jobs/triggerer_job_runner.py             | 10 +++++-----
 airflow/models/dag.py                            |  3 +--
 airflow/models/dagbag.py                         |  1 -
 airflow/models/taskmixin.py                      |  1 -
 airflow/utils/dag_cycle_tester.py                |  4 ++--
 tests/jobs/test_scheduler_job.py                 |  6 +++---
 9 files changed, 13 insertions(+), 18 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0430658d17..c3447d2e3c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -187,8 +187,8 @@ repos:
         pass_filenames: true
         # Since ruff makes use of multiple cores we _purposefully_ don't run this in docker so it can use the
         # host CPU to its fullest
-        entry: ruff --fix --no-update-check --force-exclude
-        additional_dependencies: ['ruff==0.0.265']
+        entry: ruff --fix --force-exclude
+        additional_dependencies: ['ruff==0.0.272']
         files: \.pyi?$
         exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py
   - repo: https://github.com/asottile/blacken-docs
diff --git a/airflow/example_dags/example_sensor_decorator.py b/airflow/example_dags/example_sensor_decorator.py
index 62c5d94088..db3059398c 100644
--- a/airflow/example_dags/example_sensor_decorator.py
+++ b/airflow/example_dags/example_sensor_decorator.py
@@ -27,7 +27,6 @@ import pendulum
 from airflow.decorators import dag, task
 from airflow.sensors.base import PokeReturnValue
 
-
 # [END import_module]
 
 
diff --git a/airflow/example_dags/tutorial_taskflow_api.py b/airflow/example_dags/tutorial_taskflow_api.py
index 27a28f4b79..f41f729af8 100644
--- a/airflow/example_dags/tutorial_taskflow_api.py
+++ b/airflow/example_dags/tutorial_taskflow_api.py
@@ -25,7 +25,6 @@ import pendulum
 
 from airflow.decorators import dag, task
 
-
 # [END import_module]
 
 
diff --git a/airflow/jobs/triggerer_job_runner.py b/airflow/jobs/triggerer_job_runner.py
index 5f4b77cefa..8719f4ea95 100644
--- a/airflow/jobs/triggerer_job_runner.py
+++ b/airflow/jobs/triggerer_job_runner.py
@@ -28,7 +28,7 @@ from collections import deque
 from contextlib import suppress
 from copy import copy
 from queue import SimpleQueue
-from typing import TYPE_CHECKING, Deque
+from typing import TYPE_CHECKING
 
 from sqlalchemy import func
 
@@ -429,16 +429,16 @@ class TriggerRunner(threading.Thread, LoggingMixin):
     trigger_cache: dict[str, type[BaseTrigger]]
 
     # Inbound queue of new triggers
-    to_create: Deque[tuple[int, BaseTrigger]]
+    to_create: deque[tuple[int, BaseTrigger]]
 
     # Inbound queue of deleted triggers
-    to_cancel: Deque[int]
+    to_cancel: deque[int]
 
     # Outbound queue of events
-    events: Deque[tuple[int, TriggerEvent]]
+    events: deque[tuple[int, TriggerEvent]]
 
     # Outbound queue of failed triggers
-    failed_triggers: Deque[tuple[int, BaseException]]
+    failed_triggers: deque[tuple[int, BaseException]]
 
     # Should-we-stop flag
     stop: bool = False
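
Most of the Python changes in this commit follow the pattern visible in this hunk: because these modules already use "from __future__ import annotations", the newer ruff's pyupgrade-derived rules rewrite typing.Deque annotations to the built-in collections.deque. A minimal sketch of the before and after (illustrative only, not taken from the Airflow sources):

    # Minimal sketch, not Airflow code: with postponed evaluation of
    # annotations, the built-in deque can be parameterized directly in type
    # hints, so the typing.Deque alias is no longer needed.
    from __future__ import annotations

    from collections import deque

    # Before the upgrade (flagged by ruff 0.0.272):
    #     from typing import Deque
    #     events: Deque[tuple[int, str]] = deque()

    # After running the hook's ruff --fix:
    events: deque[tuple[int, str]] = deque()
    events.append((1, "trigger fired"))
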
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index a816c9548b..b7361d6f3b 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -39,7 +39,6 @@ from typing import (
     Any,
     Callable,
     Collection,
-    Deque,
     Iterable,
     Iterator,
     List,
@@ -3784,7 +3783,7 @@ class DagContext:
 
     """
 
-    _context_managed_dags: Deque[DAG] = deque()
+    _context_managed_dags: collections.deque[DAG] = deque()
     autoregistered_dags: set[tuple[DAG, ModuleType]] = set()
     current_autoregister_module_name: str | None = None
 
diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py
index 02055b5e94..b422d4699e 100644
--- a/airflow/models/dagbag.py
+++ b/airflow/models/dagbag.py
@@ -101,7 +101,6 @@ class DagBag(LoggingMixin):
         collect_dags: bool = True,
     ):
         # Avoid circular import
-        from airflow.models.dag import DAG
 
         super().__init__()
 
diff --git a/airflow/models/taskmixin.py b/airflow/models/taskmixin.py
index a858ce942f..0c1c94b7b8 100644
--- a/airflow/models/taskmixin.py
+++ b/airflow/models/taskmixin.py
@@ -174,7 +174,6 @@ class DAGNode(DependencyMixin, metaclass=ABCMeta):
         """Sets relatives for the task or task list."""
         from airflow.models.baseoperator import BaseOperator
         from airflow.models.mappedoperator import MappedOperator
-        from airflow.models.operator import Operator
 
         if not isinstance(task_or_task_list, Sequence):
             task_or_task_list = [task_or_task_list]
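
The import removals in dagbag.py and taskmixin.py look like ruff's unused-import check (F401) doing its job: ruff --fix deletes imports that are no longer referenced anywhere in the module. A generic sketch of the same autofix on a hypothetical module (not Airflow code):

    # Hypothetical module, not Airflow code: ruff reports the os import as
    # F401 because nothing references it, and ruff --fix removes that line,
    # just like the Operator import dropped above.
    import json  # referenced below, so it is kept
    import os    # never referenced -> removed by the autofix

    def dump(payload: dict) -> str:
        return json.dumps(payload, sort_keys=True)
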
diff --git a/airflow/utils/dag_cycle_tester.py b/airflow/utils/dag_cycle_tester.py
index 3325a42f39..8f150dc0a3 100644
--- a/airflow/utils/dag_cycle_tester.py
+++ b/airflow/utils/dag_cycle_tester.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 from collections import defaultdict, deque
-from typing import TYPE_CHECKING, Deque
+from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowDagCycleException, RemovedInAirflow3Warning
 
@@ -53,7 +53,7 @@ def check_cycle(dag: DAG) -> None:
     """
     # default of int is 0 which corresponds to CYCLE_NEW
     visited: dict[str, int] = defaultdict(int)
-    path_stack: Deque[str] = deque()
+    path_stack: deque[str] = deque()
     task_dict = dag.task_dict
 
     def _check_adjacent_tasks(task_id, current_task):
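
For context on the annotation changed in this hunk: path_stack is an explicit depth-first-search stack used while walking the task graph looking for back edges. A self-contained sketch of that idea (an approximation, not Airflow's actual implementation):

    # Self-contained sketch, not Airflow's implementation: the deque acts as
    # an explicit DFS stack; reaching a node that is still "in progress" on
    # the current path means the graph contains a cycle.
    from __future__ import annotations

    from collections import defaultdict, deque

    CYCLE_NEW, CYCLE_IN_PROGRESS, CYCLE_DONE = 0, 1, 2

    def has_cycle(edges: dict[str, list[str]]) -> bool:
        visited: dict[str, int] = defaultdict(int)  # default 0 == CYCLE_NEW
        path_stack: deque[str] = deque()
        for start in edges:
            if visited[start] == CYCLE_DONE:
                continue
            path_stack.append(start)
            while path_stack:
                node = path_stack[-1]
                visited[node] = CYCLE_IN_PROGRESS
                children = [c for c in edges.get(node, []) if visited[c] != CYCLE_DONE]
                if any(visited[c] == CYCLE_IN_PROGRESS for c in children):
                    return True  # back edge to a node on the current path
                fresh = [c for c in children if visited[c] == CYCLE_NEW]
                if fresh:
                    path_stack.append(fresh[0])
                else:
                    visited[node] = CYCLE_DONE
                    path_stack.pop()
        return False

    assert has_cycle({"a": ["b"], "b": ["c"], "c": ["a"]})
    assert not has_cycle({"a": ["b"], "b": ["c"], "c": []})
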
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index 7191a6ab55..3a3d3f55b4 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -24,7 +24,7 @@ import os
 import shutil
 from datetime import timedelta
 from tempfile import mkdtemp
-from typing import Deque, Generator
+from typing import Generator
 from unittest import mock
 from unittest.mock import MagicMock, patch
 
@@ -4745,8 +4745,8 @@ class TestSchedulerJob:
 
             return spy
 
-        num_queued_tis: Deque[int] = collections.deque([], 3)
-        num_finished_events: Deque[int] = collections.deque([], 3)
+        num_queued_tis: collections.deque[int] = collections.deque([], 3)
+        num_finished_events: collections.deque[int] = collections.deque([], 3)
 
         do_scheduling_spy = mock.patch.object(
             job_runner,
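
As a side note on the two deques in this test: collections.deque([], 3) builds a bounded deque with a maximum length of three, so only the most recent samples are retained. A standalone illustration (not part of the test itself):

    # Standalone illustration, not the Airflow test: a deque built with a
    # maxlen of 3 silently drops its oldest entry once it is full.
    from __future__ import annotations

    import collections

    num_queued_tis: collections.deque[int] = collections.deque([], 3)
    for value in (5, 0, 2, 7):
        num_queued_tis.append(value)
    print(list(num_queued_tis))  # [0, 2, 7] -- the oldest sample (5) was evicted
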