Posted to commits@airflow.apache.org by po...@apache.org on 2021/05/31 10:52:57 UTC

[airflow] branch master updated: Fixes failing static checks after recent pre-commit upgrade (#16183)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new f47e10c  Fixes failing static checks after recent pre-commit upgrade (#16183)
f47e10c is described below

commit f47e10c3885a028e7c45c10c317a7dbbff9e3ab9
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon May 31 12:52:32 2021 +0200

    Fixes failing static checks after recent pre-commit upgrade (#16183)
---
 airflow/cli/simple_table.py                              | 2 +-
 airflow/jobs/scheduler_job.py                            | 4 ++--
 airflow/providers/apache/sqoop/operators/sqoop.py        | 2 +-
 airflow/serialization/serialized_objects.py              | 5 ++++-
 airflow/www/views.py                                     | 4 ++--
 tests/always/test_project_structure.py                   | 6 +++---
 tests/providers/google/cloud/operators/test_functions.py | 2 +-
 7 files changed, 14 insertions(+), 11 deletions(-)
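
Most of the hunks below apply one of two recurring fixes: replacing a list comprehension that exists only to be star-unpacked into a call with a generator expression, and adding a narrowly scoped inline pylint disable where the upgraded checker reports a false positive (see the sketch after the diff for that second pattern). A minimal, self-contained sketch of the first pattern, using illustrative data rather than the Airflow code:

    rows = [{"dag_id": "example", "state": "success"}]

    # Before: a temporary list is built just so it can be unpacked into the call.
    print(*[str(value) for value in rows[0].values()])

    # After: a generator expression is unpacked instead; the explicit parentheses
    # are required by the syntax, and no intermediate list is allocated.
    print(*(str(value) for value in rows[0].values()))

Both calls print the same output; only the intermediate allocation differs.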

diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py
index 20444fa..d17f948 100644
--- a/airflow/cli/simple_table.py
+++ b/airflow/cli/simple_table.py
@@ -61,7 +61,7 @@ class AirflowConsole(Console):
             table.add_column(col)
 
         for row in data:
-            table.add_row(*[str(d) for d in row.values()])
+            table.add_row(*(str(d) for d in row.values()))
         self.print(table)
 
     def print_as_plain_table(self, data: List[Dict]):
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index 6d07fd7..dc3f144 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -1600,13 +1600,13 @@ class SchedulerJob(BaseJob):  # pylint: disable=too-many-instance-attributes
 
         if session.bind.dialect.name == 'mssql':
             active_dagruns_filter = or_(
-                *[
+                *(
                     and_(
                         DagRun.dag_id == dm.dag_id,
                         DagRun.execution_date == dm.next_dagrun,
                     )
                     for dm in dag_models
-                ]
+                )
             )
         else:
             active_dagruns_filter = tuple_(DagRun.dag_id, DagRun.execution_date).in_(
diff --git a/airflow/providers/apache/sqoop/operators/sqoop.py b/airflow/providers/apache/sqoop/operators/sqoop.py
index a790e49..33fb66f 100644
--- a/airflow/providers/apache/sqoop/operators/sqoop.py
+++ b/airflow/providers/apache/sqoop/operators/sqoop.py
@@ -246,7 +246,7 @@ class SqoopOperator(BaseOperator):
         if self.hook is None:
             self.hook = self._get_hook()
         self.log.info('Sending SIGTERM signal to bash process group')
-        os.killpg(os.getpgid(self.hook.sub_process.pid), signal.SIGTERM)
+        os.killpg(os.getpgid(self.hook.sub_process.pid), signal.SIGTERM)  # pylint: disable=no-member
 
     def _get_hook(self) -> SqoopHook:
         return SqoopHook(
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index 451305c..9415e49 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -502,7 +502,10 @@ class SerializedBaseOperator(BaseOperator, BaseSerialization):
 
             elif k == "deps":
                 v = cls._deserialize_deps(v)
-            elif k in cls._decorated_fields or k not in op.get_serialized_fields():
+            elif (
+                k in cls._decorated_fields
+                or k not in op.get_serialized_fields()  # pylint: disable=unsupported-membership-test
+            ):
                 v = cls._deserialize(v)
             # else use v as it is
 
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 5fb4639..03698e0 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -2633,7 +2633,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
         tis = sorted(tis, key=lambda ti: ti.start_date)
         ti_fails = list(
             itertools.chain(
-                *[
+                *(
                     (
                         session.query(TaskFail)
                         .filter(
@@ -2644,7 +2644,7 @@ class Airflow(AirflowBaseView):  # noqa: D101  pylint: disable=too-many-public-m
                         .all()
                     )
                     for ti in tis
-                ]
+                )
             )
         )
 
diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py
index 560f379..d4d8645 100644
--- a/tests/always/test_project_structure.py
+++ b/tests/always/test_project_structure.py
@@ -225,7 +225,7 @@ class TestGoogleProviderProjectStructure(unittest.TestCase):
 
     def test_example_dags(self):
         operators_modules = itertools.chain(
-            *[self.find_resource_files(resource_type=d) for d in ["operators", "sensors", "transfers"]]
+            *(self.find_resource_files(resource_type=d) for d in ["operators", "sensors", "transfers"])
         )
         example_dags_files = self.find_resource_files(resource_type="example_dags")
         # Generate tuple of department and service e.g. ('marketing_platform', 'display_video')
@@ -337,11 +337,11 @@ class TestOperatorsHooks(unittest.TestCase):
     def test_no_illegal_suffixes(self):
         illegal_suffixes = ["_operator.py", "_hook.py", "_sensor.py"]
         files = itertools.chain(
-            *[
+            *(
                 glob.glob(f"{ROOT_FOLDER}/{part}/providers/**/{resource_type}/*.py", recursive=True)
                 for resource_type in ["operators", "hooks", "sensors", "example_dags"]
                 for part in ["airflow", "tests"]
-            ]
+            )
         )
 
         invalid_files = [f for f in files if any(f.endswith(suffix) for suffix in illegal_suffixes)]
diff --git a/tests/providers/google/cloud/operators/test_functions.py b/tests/providers/google/cloud/operators/test_functions.py
index 5fc68de..b0bc5de 100644
--- a/tests/providers/google/cloud/operators/test_functions.py
+++ b/tests/providers/google/cloud/operators/test_functions.py
@@ -608,7 +608,7 @@ class TestGcfFunctionDelete(unittest.TestCase):
         mock_hook.return_value.delete_function.assert_called_once_with(
             'projects/project_name/locations/project_location/functions/function_name'
         )
-        assert result['name'] == self._FUNCTION_NAME
+        assert result['name'] == self._FUNCTION_NAME  # pylint: disable=unsubscriptable-object
 
     @mock.patch('airflow.providers.google.cloud.operators.functions.CloudFunctionsHook')
     def test_correct_name(self, mock_hook):
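
The second recurring fix, visible in the sqoop operator, serialized_objects, and the functions test above, is an inline pylint disable scoped to a single line where the upgraded checker reports a member or subscript it cannot infer. A minimal sketch of that style, with an illustrative function (not the Airflow API), assuming the checker flags the subscript on a possibly-None return value:

    from typing import Dict, Optional


    def execute(succeed: bool) -> Optional[Dict[str, str]]:
        """Illustrative stand-in: a return type the checker narrows to 'dict or None'."""
        return {"name": "function_name"} if succeed else None


    result = execute(True)
    # Depending on the pylint version, subscripting a possibly-None value can be
    # reported as unsubscriptable-object; the trailing comment silences that one
    # message on this line only, leaving the check active elsewhere.
    assert result["name"] == "function_name"  # pylint: disable=unsubscriptable-object

Scoping the disable to a single line keeps the rest of the module under the stricter checks introduced by the pre-commit upgrade.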