Posted to commits@airflow.apache.org by ka...@apache.org on 2020/05/21 18:17:53 UTC

[airflow] branch v1-10-test updated: Fix new flake8 warnings on v1-10-test branch (#8953)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch v1-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/v1-10-test by this push:
     new 4bccfd5  Fix new flake8 warnings on v1-10-test branch (#8953)
4bccfd5 is described below

commit 4bccfd53d0a429c67fe43059fa4b6c1346567053
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Thu May 21 19:16:03 2020 +0100

    Fix new flake8 warnings on v1-10-test branch (#8953)
---
 airflow/api/common/experimental/mark_tasks.py       |  8 +++-----
 airflow/contrib/operators/file_to_wasb.py           |  5 ++---
 airflow/contrib/operators/qubole_check_operator.py  |  2 +-
 airflow/contrib/utils/gcp_field_validator.py        |  2 +-
 airflow/jobs/backfill_job.py                        |  5 ++---
 airflow/operators/docker_operator.py                |  4 ++--
 airflow/www/views.py                                | 18 +++++++++---------
 airflow/www_rbac/utils.py                           |  3 ++-
 airflow/www_rbac/views.py                           | 12 ++++++------
 docs/conf.py                                        |  1 +
 tests/contrib/hooks/test_aws_glue_catalog_hook.py   |  1 +
 tests/contrib/operators/test_gcs_to_gcs_operator.py |  2 +-
 tests/contrib/utils/base_gcp_system_test_case.py    |  2 +-
 tests/test_core.py                                  | 16 ++++++++--------
 14 files changed, 40 insertions(+), 41 deletions(-)

diff --git a/airflow/api/common/experimental/mark_tasks.py b/airflow/api/common/experimental/mark_tasks.py
index 9cc4852..53b52f3 100644
--- a/airflow/api/common/experimental/mark_tasks.py
+++ b/airflow/api/common/experimental/mark_tasks.py
@@ -137,8 +137,7 @@ def all_subdag_tasks_query(sub_dag_run_ids, session, state, confirmed_dates):  #
         filter(
             TaskInstance.dag_id.in_(sub_dag_run_ids),
             TaskInstance.execution_date.in_(confirmed_dates)  # noqa: E123
-        ).\
-        filter(
+        ).filter(  # noqa: E123
             or_(
                 TaskInstance.state.is_(None),
                 TaskInstance.state != state
@@ -154,13 +153,12 @@ def get_all_dag_task_query(dag, session, state, task_ids, confirmed_dates):  # n
             TaskInstance.dag_id == dag.dag_id,
             TaskInstance.execution_date.in_(confirmed_dates),
             TaskInstance.task_id.in_(task_ids)  # noqa: E123
-        ).\
-        filter(
+        ).filter(  # noqa: E123
             or_(
                 TaskInstance.state.is_(None),
                 TaskInstance.state != state
             )
-        )
+        )  # noqa: E123
     return qry_dag
 
 
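Note: pycodestyle's E123 fires when a closing bracket does not match the indentation of the line that opens the construct; it is in pycodestyle's default ignore list, so seeing it here presumably reflects the stricter flake8 configuration on this branch. The hunk also drops the old ".\" backslash continuation in favor of chaining .filter() directly on the closing bracket. A minimal standalone sketch of the E123 pattern (not the real query):

    def build(items):
        # closing bracket aligned with the arguments rather than with
        # the opening line -- exactly the layout E123 reports
        result = sorted(
            items,
            reverse=True,
            )  # noqa: E123
        return result

    print(build([3, 1, 2]))  # [3, 2, 1]
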
diff --git a/airflow/contrib/operators/file_to_wasb.py b/airflow/contrib/operators/file_to_wasb.py
index 27760af..8cb9fca 100644
--- a/airflow/contrib/operators/file_to_wasb.py
+++ b/airflow/contrib/operators/file_to_wasb.py
@@ -57,8 +57,7 @@ class FileToWasbOperator(BaseOperator):
         """Upload a file to Azure Blob Storage."""
         hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
         self.log.info(
-            'Uploading %s to wasb://%s '
-            'as %s'.format(self.file_path, self.container_name, self.blob_name)
-        )
+            'Uploading %s to wasb://%s '  # noqa: F523
+            'as %s', self.file_path, self.container_name, self.blob_name)
         hook.load_file(self.file_path, self.container_name,
                        self.blob_name, **self.load_options)
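
Note: the original call built the message with str.format against %-style placeholders, so nothing was ever substituted. The fix passes the values straight to self.log.info and lets the logging framework interpolate them lazily, only when the record is actually emitted. A standalone sketch with hypothetical values:

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)

    file_path, container, blob = '/tmp/data.csv', 'my-container', 'data.csv'
    # logging fills the %s placeholders itself; no .format, no f-string
    log.info('Uploading %s to wasb://%s as %s', file_path, container, blob)
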
diff --git a/airflow/contrib/operators/qubole_check_operator.py b/airflow/contrib/operators/qubole_check_operator.py
index 0286763..2e8bfa4 100644
--- a/airflow/contrib/operators/qubole_check_operator.py
+++ b/airflow/contrib/operators/qubole_check_operator.py
@@ -223,6 +223,6 @@ def handle_airflow_exception(airflow_exception, hook):
                                 '\nQubole Command Results:' \
                                 '\n{qubole_command_results}'.format(
                 qubole_command_id=qubole_command_id,  # noqa: E122
-                qubole_command_results=qubole_command_results)
+                qubole_command_results=qubole_command_results)  # noqa: E122
             raise AirflowException(str(airflow_exception) + exception_message)
     raise AirflowException(str(airflow_exception))
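
Note: a # noqa marker silences findings only on its own physical line, so a construct spanning several continuation lines needs one marker per reported line; that is why a second E122 ("continuation line missing indentation or outdented") marker is added here. A standalone sketch of the shape, with hypothetical values:

    msg = '\nQubole Command Id: {qubole_command_id}' \
          '\nQubole Command Results: {qubole_command_results}'.format(
        qubole_command_id=1,  # noqa: E122
        qubole_command_results='ok')  # noqa: E122
    print(msg)
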
diff --git a/airflow/contrib/utils/gcp_field_validator.py b/airflow/contrib/utils/gcp_field_validator.py
index 73e37f3..819a597 100644
--- a/airflow/contrib/utils/gcp_field_validator.py
+++ b/airflow/contrib/utils/gcp_field_validator.py
@@ -251,7 +251,7 @@ class GcpBodyFieldValidator(LoggingMixin):
         if not value:
             raise GcpFieldValidationException(
                 "The body field '{}' can't be empty. Please provide a value."
-                .format(full_field_path, value))
+                .format(full_field_path))
 
     def _validate_dict(self, children_validation_specs, full_field_path, value):
         # type: (dict, str, dict) -> None
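
Note: the message has a single {} placeholder, so the second argument was never rendered; pyflakes reports surplus positional arguments to str.format as F523, and the fix simply drops the extra one. A standalone sketch with hypothetical values:

    full_field_path, value = 'body.name', None
    # before: "...'{}'...".format(full_field_path, value)  -- F523
    # after: only the argument the placeholder consumes is passed
    msg = "The body field '{}' can't be empty.".format(full_field_path)
    print(msg)
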
diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py
index 29e426b..b1e0755 100644
--- a/airflow/jobs/backfill_job.py
+++ b/airflow/jobs/backfill_job.py
@@ -556,9 +556,8 @@ class BackfillJob(BaseJob):
                         open_slots = pool.open_slots(session=session)
                         if open_slots <= 0:
                             raise NoAvailablePoolSlot(
-                                "Not scheduling since there are "
-                                "%s open slots in pool %s".format(
-                                    open_slots, task.pool))
+                                "Not scheduling since there are "  # noqa: F523
+                                "%s open slots in pool %s".format(open_slots, task.pool))  # noqa: F523
 
                         num_running_task_instances_in_dag = DAG.get_num_task_instances(
                             self.dag_id,
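
Note: unlike the hunks above, this one suppresses the warning instead of rewriting the call: the message keeps %s placeholders while still going through str.format, so the slot count and pool name are not actually interpolated into the exception text. A sketch of what a substituting version might look like, which is not what this commit does (hypothetical local values standing in for open_slots and task.pool):

    open_slots, pool_name = 0, 'default_pool'
    message = ("Not scheduling since there are "
               "{} open slots in pool {}".format(open_slots, pool_name))
    print(message)  # Not scheduling since there are 0 open slots in pool default_pool
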
diff --git a/airflow/operators/docker_operator.py b/airflow/operators/docker_operator.py
index cb335d3..85c17d8 100644
--- a/airflow/operators/docker_operator.py
+++ b/airflow/operators/docker_operator.py
@@ -265,8 +265,8 @@ class DockerOperator(BaseOperator):
         # Pull the docker image if `force_pull` is set or image does not exist locally
         if self.force_pull or len(self.cli.images(name=self.image)) == 0:
             self.log.info('Pulling docker image %s', self.image)
-            for l in self.cli.pull(self.image, stream=True, decode=True):
-                output = json.loads(l.decode('utf-8').strip())
+            for line in self.cli.pull(self.image, stream=True, decode=True):
+                output = json.loads(line.decode('utf-8').strip())
                 if 'status' in output:
                     self.log.info("%s", output['status'])
 
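Note: this rename fixes E741 ("ambiguous variable name"), which flags single-letter names such as l, I, and O because they are easily misread as 1 or 0 in many fonts; the same l-to-line rename is applied throughout tests/test_core.py further down. A standalone sketch:

    lines_out = []
    # for l in ['a', 'b']:   <- E741: 'l' reads like '1' or 'I'
    for line in ['a', 'b']:  # descriptive name, no warning
        lines_out.append(line.upper())
    print(lines_out)  # ['A', 'B']
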
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 3a4fef5..62d2d4b 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -616,19 +616,19 @@ class Airflow(AirflowViewMixin, BaseView):
 
         LastDagRun = (
             session.query(DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('execution_date'))
-                .join(Dag, Dag.dag_id == DagRun.dag_id)
-                .filter(DagRun.state != State.RUNNING)
-                .filter(Dag.is_active == True)  # noqa: E712
-                .filter(Dag.is_subdag == False)  # noqa: E712
-                .group_by(DagRun.dag_id)
+            .join(Dag, Dag.dag_id == DagRun.dag_id)
+            .filter(DagRun.state != State.RUNNING)
+            .filter(Dag.is_active == True)  # noqa: E712
+            .filter(Dag.is_subdag == False)  # noqa: E712
+            .group_by(DagRun.dag_id)
         )
 
         RunningDagRun = (
             session.query(DagRun.dag_id, DagRun.execution_date)
-                .join(Dag, Dag.dag_id == DagRun.dag_id)
-                .filter(DagRun.state == State.RUNNING)
-                .filter(Dag.is_active == True)  # noqa: E712
-                .filter(Dag.is_subdag == False)  # noqa: E712
+            .join(Dag, Dag.dag_id == DagRun.dag_id)
+            .filter(DagRun.state == State.RUNNING)
+            .filter(Dag.is_active == True)  # noqa: E712
+            .filter(Dag.is_subdag == False)  # noqa: E712
         )
 
         if selected_dag_ids:
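
Note: the two query builders are only reindented: each chained call now sits at a single four-space hanging indent inside the wrapping parentheses instead of being aligned under the previous dots, presumably to keep pycodestyle's continuation-line checks quiet without markers; the same reindentation is applied in airflow/www_rbac/views.py below. The same layout with plain builtins, as a runnable sketch:

    message = (
        '  Fix new flake8 warnings  '
        .strip()
        .replace('flake8', 'lint')
        .upper()
    )
    print(message)  # FIX NEW LINT WARNINGS
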
diff --git a/airflow/www_rbac/utils.py b/airflow/www_rbac/utils.py
index e02aae9..c2c551b 100644
--- a/airflow/www_rbac/utils.py
+++ b/airflow/www_rbac/utils.py
@@ -17,8 +17,9 @@
 # specific language governing permissions and limitations
 # under the License.
 
+# flake8: noqa: E402
 from future import standard_library  # noqa
-standard_library.install_aliases()  # noqa
+standard_library.install_aliases()  # noqa: E402
 
 import functools
 import inspect
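
Note: E402 ("module level import not at top of file") flags every import that follows executable statements; here install_aliases() has to run before the remaining imports, so the commit adds a file-level "# flake8: noqa: E402" directive alongside the per-line marker, assuming the flake8 version in use honors code-qualified file-level directives. A standalone sketch of the per-line form, which is the more portable one:

    from __future__ import print_function
    import sys

    sys.path.insert(0, '.')  # executable code ahead of an import

    import functools  # noqa: E402 -- deliberately after the statement above
    print(functools.reduce(lambda a, b: a + b, [1, 2, 3]))  # prints 6
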
diff --git a/airflow/www_rbac/views.py b/airflow/www_rbac/views.py
index 49cb7d2..453cf83 100644
--- a/airflow/www_rbac/views.py
+++ b/airflow/www_rbac/views.py
@@ -423,12 +423,12 @@ class Airflow(AirflowBaseView):
 
         LastDagRun = (
             session.query(
-                        DagRun.dag_id,
-                        sqla.func.max(DagRun.execution_date).label('execution_date'))
-                   .join(Dag, Dag.dag_id == DagRun.dag_id)
-                   .filter(DagRun.state != State.RUNNING)
-                   .filter(Dag.is_active == True)  # noqa
-                   .group_by(DagRun.dag_id)
+                DagRun.dag_id,
+                sqla.func.max(DagRun.execution_date).label('execution_date'))
+            .join(Dag, Dag.dag_id == DagRun.dag_id)
+            .filter(DagRun.state != State.RUNNING)
+            .filter(Dag.is_active == True)  # noqa
+            .group_by(DagRun.dag_id)
         )
 
         RunningDagRun = (
diff --git a/docs/conf.py b/docs/conf.py
index bb63d15..22126ab 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -219,6 +219,7 @@ exclude_patterns = [
     '_api/airflow/www',
     '_api/airflow/www_rbac',
     '_api/main',
+    '_api/mesos_executor',
     'autoapi_templates',
     'howto/operator/gcp/_partials',
 ]
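
Note: exclude_patterns lists paths, relative to the documentation source directory, that Sphinx skips when collecting source files; adding _api/mesos_executor presumably keeps the generated stub page for that module out of the build, like the other _api entries. A minimal conf.py-style sketch (paths as in the hunk):

    # docs/conf.py (excerpt-style sketch)
    exclude_patterns = [
        '_api/main',
        '_api/mesos_executor',  # generated stub we do not want rendered
        'autoapi_templates',
    ]
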
diff --git a/tests/contrib/hooks/test_aws_glue_catalog_hook.py b/tests/contrib/hooks/test_aws_glue_catalog_hook.py
index 57ec7a1..09b45c8 100644
--- a/tests/contrib/hooks/test_aws_glue_catalog_hook.py
+++ b/tests/contrib/hooks/test_aws_glue_catalog_hook.py
@@ -43,6 +43,7 @@ TABLE_INPUT = {
     }
 }
 
+
 @unittest.skipIf(mock_glue is None,
                  "Skipping test because moto.mock_glue is not available")
 class TestAwsGlueCatalogHook(unittest.TestCase):
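
Note: the added blank line fixes E302 ("expected 2 blank lines, got 1"): pycodestyle wants two blank lines between a top-level definition (counted from its decorator, if any) and whatever precedes it. A standalone sketch with a hypothetical value:

    import unittest

    TABLE_INPUT = {'Name': 'test_table'}  # hypothetical value


    class TestExample(unittest.TestCase):  # two blank lines above satisfy E302
        def test_table_input(self):
            self.assertIn('Name', TABLE_INPUT)

    if __name__ == '__main__':
        unittest.main()
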
diff --git a/tests/contrib/operators/test_gcs_to_gcs_operator.py b/tests/contrib/operators/test_gcs_to_gcs_operator.py
index 1e82a2b..f9085e2 100644
--- a/tests/contrib/operators/test_gcs_to_gcs_operator.py
+++ b/tests/contrib/operators/test_gcs_to_gcs_operator.py
@@ -288,7 +288,7 @@ class GoogleCloudStorageToCloudStorageOperatorTest(unittest.TestCase):
         total_wildcards = operator.source_object.count(WILDCARD)
 
         error_msg = "Only one wildcard '[*]' is allowed in source_object parameter. " \
-                    "Found {}".format(total_wildcards, SOURCE_OBJECT_MULTIPLE_WILDCARDS)
+                    "Found {}".format(total_wildcards)
 
         with self.assertRaisesRegexp(AirflowException, error_msg):
             operator.execute(None)
diff --git a/tests/contrib/utils/base_gcp_system_test_case.py b/tests/contrib/utils/base_gcp_system_test_case.py
index 6d3a296..379c05f 100644
--- a/tests/contrib/utils/base_gcp_system_test_case.py
+++ b/tests/contrib/utils/base_gcp_system_test_case.py
@@ -86,7 +86,7 @@ environment. You can enable it in one of two ways:
 * Run this test within automated environment variable workspace where
   config directory is checked out next to the airflow one.
 
-""".format(__file__)
+"""
 
 
 class BaseGcpSystemTestCase(unittest.TestCase, LoggingMixin):
diff --git a/tests/test_core.py b/tests/test_core.py
index 0c63c0e..ebc6604 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -1182,7 +1182,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check list attempt stdout
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             ("\tThe following args are not compatible with the " +
              "--list flag: ['conn_id', 'conn_uri', 'conn_extra', " +
@@ -1225,7 +1225,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check addition stdout
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             ("\tSuccessfully added `conn_id`=new1 : " +
              "postgresql://airflow:airflow@host:5432/airflow"),
@@ -1250,7 +1250,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check stdout for addition attempt
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             "\tA connection with `conn_id`=new1 already exists",
         ])
@@ -1263,7 +1263,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check stdout for addition attempt
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             ("\tThe following args are required to add a connection:" +
              " ['conn_id']"),
@@ -1277,7 +1277,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check stdout for addition attempt
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             ("\tThe following args are required to add a connection:" +
              " ['conn_uri or conn_type']"),
@@ -1327,7 +1327,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check deletion stdout
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             "\tSuccessfully deleted `conn_id`=new1",
             "\tSuccessfully deleted `conn_id`=new2",
@@ -1354,7 +1354,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check deletion attempt stdout
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             "\tDid not find a connection with `conn_id`=fake",
         ])
@@ -1368,7 +1368,7 @@ class CliTests(unittest.TestCase):
             stdout = mock_stdout.getvalue()
 
         # Check deletion attempt stdout
-        lines = [l for l in stdout.split('\n') if len(l) > 0]
+        lines = [line for line in stdout.split('\n') if len(line) > 0]
         self.assertListEqual(lines, [
             ("\tThe following args are not compatible with the " +
              "--delete flag: ['conn_uri', 'conn_type']"),