Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2019/01/08 17:12:10 UTC

[GitHub] feng-tao closed pull request #4436: [AIRFLOW-3631] Update flake8 and fix lint.

feng-tao closed pull request #4436: [AIRFLOW-3631] Update flake8 and fix lint.
URL: https://github.com/apache/airflow/pull/4436
 
 
   

This is a PR merged from a forked repository. As GitHub hides the original
diff of a foreign (fork) pull request once it is merged, the diff is
reproduced below for the sake of provenance:

diff --git a/.flake8 b/.flake8
index 2723df1f10..e2ba4cbf6f 100644
--- a/.flake8
+++ b/.flake8
@@ -1,3 +1,3 @@
 [flake8]
 max-line-length = 110
-ignore = E731
+ignore = E731,W504
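
For context, W503 and W504 are pycodestyle's mutually exclusive line-wrapping
checks: W503 flags a break before a binary operator and W504 a break after
one, so any expression wrapped at an operator trips one of the two once the
explicit "ignore =" list overrides pycodestyle's defaults (which suppress
both). A minimal sketch of the two styles, with invented variable names:

    first_value, second_value = 1, 2

    # Break after the operator -- reported as W504 when that check is active.
    total = (first_value +
             second_value)

    # Break before the operator -- reported as W503 instead.
    also_total = (first_value
                  + second_value)
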
diff --git a/airflow/config_templates/default_celery.py b/airflow/config_templates/default_celery.py
index 5e72134de1..9daa4d4677 100644
--- a/airflow/config_templates/default_celery.py
+++ b/airflow/config_templates/default_celery.py
@@ -55,7 +55,7 @@ def _broker_supports_visibility_timeout(url):
 celery_ssl_active = False
 try:
     celery_ssl_active = configuration.conf.getboolean('celery', 'SSL_ACTIVE')
-except AirflowConfigException as e:
+except AirflowConfigException:
     log.warning("Celery Executor will run without SSL")
 
 try:
@@ -65,7 +65,7 @@ def _broker_supports_visibility_timeout(url):
                           'ca_certs': configuration.conf.get('celery', 'SSL_CACERT'),
                           'cert_reqs': ssl.CERT_REQUIRED}
         DEFAULT_CELERY_CONFIG['broker_use_ssl'] = broker_use_ssl
-except AirflowConfigException as e:
+except AirflowConfigException:
     raise AirflowException('AirflowConfigException: SSL_ACTIVE is True, '
                            'please ensure SSL_KEY, '
                            'SSL_CERT and SSL_CACERT are set')
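
The same pattern recurs in most hunks below: binding the exception to a name
that is never read ("as e") is reported by pyflakes 2.x (bundled with
flake8 3.6) as an unused local variable, F841. A minimal, self-contained
sketch; ConfigError and ssl_enabled are stand-ins invented for illustration:

    import logging

    log = logging.getLogger(__name__)


    class ConfigError(Exception):
        """Stand-in for AirflowConfigException in this sketch."""


    def ssl_enabled(getter):
        try:
            return getter('celery', 'SSL_ACTIVE')
        except ConfigError:
            # Previously "except ConfigError as e" -- with e never read,
            # the linter reports the binding as unused.
            log.warning("Celery Executor will run without SSL")
            return False
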
diff --git a/airflow/contrib/executors/kubernetes_executor.py b/airflow/contrib/executors/kubernetes_executor.py
index fa81cf3203..b87b5d55c8 100644
--- a/airflow/contrib/executors/kubernetes_executor.py
+++ b/airflow/contrib/executors/kubernetes_executor.py
@@ -444,7 +444,7 @@ def _strip_unsafe_kubernetes_special_chars(string):
 
     @staticmethod
     def _make_safe_pod_id(safe_dag_id, safe_task_id, safe_uuid):
-        """
+        r"""
         Kubernetes pod names must be <= 253 chars and must pass the following regex for
         validation
         "^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
diff --git a/airflow/contrib/hooks/gcp_dataflow_hook.py b/airflow/contrib/hooks/gcp_dataflow_hook.py
index 0eee769d61..9f3833b4f3 100644
--- a/airflow/contrib/hooks/gcp_dataflow_hook.py
+++ b/airflow/contrib/hooks/gcp_dataflow_hook.py
@@ -141,7 +141,7 @@ def _line(self, fd):
     def _extract_job(line):
         # Job id info: https://goo.gl/SE29y9.
         job_id_pattern = re.compile(
-            b'.*console.cloud.google.com/dataflow.*/jobs/([a-z|0-9|A-Z|\-|\_]+).*')
+            br'.*console.cloud.google.com/dataflow.*/jobs/([a-z|0-9|A-Z|\-|\_]+).*')
         matched_job = job_id_pattern.search(line or '')
         if matched_job:
             return matched_job.group(1).decode()
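
The raw-string prefixes added to executable regexes throughout the PR address
the same W605 warning: sequences such as "\d" or "\-" in a plain (byte)
string are invalid escapes in Python 3. A minimal, self-contained sketch; the
sample log line is invented:

    import re

    # br'...' keeps every backslash literal; the same pattern in b'...' would
    # rely on invalid escape sequences and be flagged as W605.
    JOB_ID_RE = re.compile(br'/jobs/([a-zA-Z0-9\-_]+)')

    line = b'... console.cloud.google.com/dataflow/jobs/2019-01-08_abc123 ...'
    match = JOB_ID_RE.search(line)
    print(match.group(1).decode() if match else None)  # -> 2019-01-08_abc123
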
diff --git a/airflow/contrib/hooks/spark_submit_hook.py b/airflow/contrib/hooks/spark_submit_hook.py
index 197b84a7b6..619a400b50 100644
--- a/airflow/contrib/hooks/spark_submit_hook.py
+++ b/airflow/contrib/hooks/spark_submit_hook.py
@@ -391,14 +391,14 @@ def _process_spark_submit_log(self, itr):
             # If we run Kubernetes cluster mode, we want to extract the driver pod id
             # from the logs so we can kill the application when we stop it unexpectedly
             elif self._is_kubernetes:
-                match = re.search('\s*pod name: ((.+?)-([a-z0-9]+)-driver)', line)
+                match = re.search(r'\s*pod name: ((.+?)-([a-z0-9]+)-driver)', line)
                 if match:
                     self._kubernetes_driver_pod = match.groups()[0]
                     self.log.info("Identified spark driver pod: %s",
                                   self._kubernetes_driver_pod)
 
                 # Store the Spark Exit code
-                match_exit_code = re.search('\s*Exit code: (\d+)', line)
+                match_exit_code = re.search(r'\s*Exit code: (\d+)', line)
                 if match_exit_code:
                     self._spark_exit_code = int(match_exit_code.groups()[0])
 
@@ -406,7 +406,7 @@ def _process_spark_submit_log(self, itr):
             # we need to extract the driver id from the logs. This allows us to poll for
             # the status using the driver id. Also, we can kill the driver when needed.
             elif self._should_track_driver_status and not self._driver_id:
-                match_driver_id = re.search('(driver-[0-9\-]+)', line)
+                match_driver_id = re.search(r'(driver-[0-9\-]+)', line)
                 if match_driver_id:
                     self._driver_id = match_driver_id.groups()[0]
                     self.log.info("identified spark driver id: {}"
diff --git a/airflow/contrib/operators/azure_container_instances_operator.py b/airflow/contrib/operators/azure_container_instances_operator.py
index 8b64bb1863..f1b2a2a1a5 100644
--- a/airflow/contrib/operators/azure_container_instances_operator.py
+++ b/airflow/contrib/operators/azure_container_instances_operator.py
@@ -186,7 +186,7 @@ def execute(self, context):
                 raise AirflowException("Container had a non-zero exit code, %s"
                                        % exit_code)
 
-        except CloudError as e:
+        except CloudError:
             self.log.exception("Could not start container group")
             raise AirflowException("Could not start container group")
 
@@ -216,7 +216,7 @@ def _monitor_logging(self, ci_hook, resource_group, name):
                     try:
                         logs = ci_hook.get_logs(resource_group, name)
                         last_line_logged = self._log_last(logs, last_line_logged)
-                    except CloudError as err:
+                    except CloudError:
                         self.log.exception("Exception while getting logs from "
                                            "container instance, retrying...")
 
diff --git a/airflow/contrib/operators/winrm_operator.py b/airflow/contrib/operators/winrm_operator.py
index c81acac44f..e8366b809d 100644
--- a/airflow/contrib/operators/winrm_operator.py
+++ b/airflow/contrib/operators/winrm_operator.py
@@ -115,7 +115,7 @@ def execute(self, context):
                         self.log.info(line)
                     for line in stderr.decode('utf-8').splitlines():
                         self.log.warning(line)
-                except WinRMOperationTimeoutError as e:
+                except WinRMOperationTimeoutError:
                     # this is an expected error when waiting for a
                     # long-running process, just silently retry
                     pass
diff --git a/airflow/contrib/sensors/ftp_sensor.py b/airflow/contrib/sensors/ftp_sensor.py
index c419a46beb..4773b15989 100644
--- a/airflow/contrib/sensors/ftp_sensor.py
+++ b/airflow/contrib/sensors/ftp_sensor.py
@@ -34,7 +34,7 @@ class FTPSensor(BaseSensorOperator):
     """Errors that are transient in nature, and where action can be retried"""
     transient_errors = [421, 425, 426, 434, 450, 451, 452]
 
-    error_code_pattern = re.compile("([\d]+)")
+    error_code_pattern = re.compile(r"([\d]+)")
 
     @apply_defaults
     def __init__(
diff --git a/airflow/hooks/hive_hooks.py b/airflow/hooks/hive_hooks.py
index a79a46d18a..9b25064188 100644
--- a/airflow/hooks/hive_hooks.py
+++ b/airflow/hooks/hive_hooks.py
@@ -289,7 +289,7 @@ def test_hql(self, hql):
                 except AirflowException as e:
                     message = e.args[0].split('\n')[-2]
                     self.log.info(message)
-                    error_loc = re.search('(\d+):(\d+)', message)
+                    error_loc = re.search(r'(\d+):(\d+)', message)
                     if error_loc and error_loc.group(1).isdigit():
                         lst = int(error_loc.group(1))
                         begin = max(lst - 2, 0)
diff --git a/airflow/hooks/webhdfs_hook.py b/airflow/hooks/webhdfs_hook.py
index c4dbe8be65..9833be6cd6 100644
--- a/airflow/hooks/webhdfs_hook.py
+++ b/airflow/hooks/webhdfs_hook.py
@@ -67,7 +67,7 @@ def get_conn(self):
             except HdfsError as e:
                 self.log.debug(
                     "Read operation on namenode {nn.host} "
-                    "failed with error: {e}".format(**locals())
+                    "failed with error: {e}".format(nn=nn, e=e)
                 )
         nn_hosts = [c.host for c in nn_connections]
         no_nn_error = "Read operations failed " \
@@ -83,7 +83,7 @@ def check_for_path(self, hdfs_path):
 
     def load_file(self, source, destination, overwrite=True, parallelism=1,
                   **kwargs):
-        """
+        r"""
         Uploads a file to HDFS
 
         :param source: Local path to file or folder. If a folder, all the files
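
The switch away from .format(**locals()) in the hunk above is related: when
the only use of the bound exception lives inside **locals(), pyflakes cannot
see it, so the "as e" binding would look unused -- and explicit keywords are
easier to read in any case. A minimal sketch with invented names:

    def describe_failure(namenode, error):
        # Explicit keywords keep both references visible to the linter and
        # the reader instead of pulling them implicitly from locals().
        return "Read operation on namenode {nn} failed with error: {e}".format(
            nn=namenode, e=error)


    print(describe_failure("nn1.example.com", "connection refused"))
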
diff --git a/airflow/lineage/datasets.py b/airflow/lineage/datasets.py
index 40c8edc9a8..49dd492bb7 100644
--- a/airflow/lineage/datasets.py
+++ b/airflow/lineage/datasets.py
@@ -96,7 +96,7 @@ def map_type(name):
             if cls.type_name == name:
                 return cls
 
-        raise NotImplemented("No known mapping for {}".format(name))
+        raise NotImplementedError("No known mapping for {}".format(name))
 
 
 class DataBase(DataSet):
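
NotImplemented is a sentinel value meant to be returned from binary dunder
methods, not an exception class, so the old code would itself crash when the
error path was hit; pyflakes also flags the pattern (F901, if memory serves).
A minimal, self-contained sketch:

    def map_type(name, known=()):
        for cls in known:
            if getattr(cls, 'type_name', None) == name:
                return cls
        # raise NotImplemented("...") would fail with
        # "TypeError: 'NotImplementedType' object is not callable".
        raise NotImplementedError("No known mapping for {}".format(name))
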
diff --git a/airflow/operators/check_operator.py b/airflow/operators/check_operator.py
index 98af08dd3a..404964dad6 100644
--- a/airflow/operators/check_operator.py
+++ b/airflow/operators/check_operator.py
@@ -158,7 +158,7 @@ def execute(self, context=None):
         elif is_numeric_value_check:
             try:
                 num_rec = [float(r) for r in records]
-            except (ValueError, TypeError) as e:
+            except (ValueError, TypeError):
                 cvestr = "Converting a result to float failed.\n"
                 raise AirflowException(cvestr + except_temp.format(**locals()))
             if self.has_tolerance:
diff --git a/airflow/operators/hive_operator.py b/airflow/operators/hive_operator.py
index cf3f655e59..b7dd2c84fd 100644
--- a/airflow/operators/hive_operator.py
+++ b/airflow/operators/hive_operator.py
@@ -113,7 +113,7 @@ def get_hook(self):
     def prepare_template(self):
         if self.hiveconf_jinja_translate:
             self.hql = re.sub(
-                "(\$\{(hiveconf:)?([ a-zA-Z0-9_]*)\})", "{{ \g<3> }}", self.hql)
+                r"(\$\{(hiveconf:)?([ a-zA-Z0-9_]*)\})", r"{{ \g<3> }}", self.hql)
         if self.script_begin_tag and self.script_begin_tag in self.hql:
             self.hql = "\n".join(self.hql.split(self.script_begin_tag)[1:])
 
diff --git a/airflow/operators/pig_operator.py b/airflow/operators/pig_operator.py
index 69a54b7f06..3426ec522e 100644
--- a/airflow/operators/pig_operator.py
+++ b/airflow/operators/pig_operator.py
@@ -62,7 +62,7 @@ def get_hook(self):
     def prepare_template(self):
         if self.pigparams_jinja_translate:
             self.pig = re.sub(
-                "(\$([a-zA-Z_][a-zA-Z0-9_]*))", "{{ \g<2> }}", self.pig)
+                r"(\$([a-zA-Z_][a-zA-Z0-9_]*))", r"{{ \g<2> }}", self.pig)
 
     def execute(self, context):
         self.log.info('Executing: %s', self.pig)
diff --git a/airflow/security/utils.py b/airflow/security/utils.py
index d46c03af99..e10510e24e 100644
--- a/airflow/security/utils.py
+++ b/airflow/security/utils.py
@@ -30,7 +30,7 @@ def get_components(principal):
     """
     if not principal:
         return None
-    return re.split('[\/@]', str(principal))
+    return re.split(r'[\/@]', str(principal))
 
 
 def replace_hostname_pattern(components, host=None):
diff --git a/airflow/sensors/hdfs_sensor.py b/airflow/sensors/hdfs_sensor.py
index 4d95556f47..298ac48aec 100644
--- a/airflow/sensors/hdfs_sensor.py
+++ b/airflow/sensors/hdfs_sensor.py
@@ -87,7 +87,7 @@ def filter_for_ignored_ext(result, ignored_ext, ignore_copying):
         """
         if ignore_copying:
             log = LoggingMixin().log
-            regex_builder = "^.*\.(%s$)$" % '$|'.join(ignored_ext)
+            regex_builder = r"^.*\.(%s$)$" % '$|'.join(ignored_ext)
             ignored_extensions_regex = re.compile(regex_builder)
             log.debug(
                 'Filtering result for ignored extensions: %s in files %s',
diff --git a/airflow/settings.py b/airflow/settings.py
index 8691fe4e75..55506baa95 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -84,13 +84,13 @@ def timing(cls, stat, dt):
 except (socket.gaierror, ImportError):
     log.warning("Could not configure StatsClient, using DummyStatsLogger instead.")
 
-HEADER = """\
-  ____________       _____________
- ____    |__( )_________  __/__  /________      __
-____  /| |_  /__  ___/_  /_ __  /_  __ \_ | /| / /
-___  ___ |  / _  /   _  __/ _  / / /_/ /_ |/ |/ /
- _/_/  |_/_/  /_/    /_/    /_/  \____/____/|__/
- """
+HEADER = '\n'.join([
+    r'  ____________       _____________',
+    r' ____    |__( )_________  __/__  /________      __',
+    r'____  /| |_  /__  ___/_  /_ __  /_  __ \_ | /| / /',
+    r'___  ___ |  / _  /   _  __/ _  / / /_/ /_ |/ |/ /',
+    r' _/_/  |_/_/  /_/    /_/    /_/  \____/____/|__/',
+])
 
 LOGGING_LEVEL = logging.INFO
 
diff --git a/airflow/utils/dates.py b/airflow/utils/dates.py
index 3393de0311..6926535930 100644
--- a/airflow/utils/dates.py
+++ b/airflow/utils/dates.py
@@ -24,7 +24,7 @@
 
 from airflow.utils import timezone
 from datetime import datetime, timedelta
-from dateutil.relativedelta import relativedelta  # flake8: noqa: F401 for doctest
+from dateutil.relativedelta import relativedelta  # noqa: F401 for doctest
 import six
 
 from croniter import croniter
@@ -175,9 +175,7 @@ def round_time(dt, delta, start_date=timezone.make_aware(datetime.min)):
         if start_date + (lower + 1) * delta >= dt:
             # Check if start_date + (lower + 1)*delta or
             # start_date + lower*delta is closer to dt and return the solution
-            if (
-                (start_date + (lower + 1) * delta) - dt <=
-                dt - (start_date + lower * delta)):
+            if (start_date + (lower + 1) * delta) - dt <= dt - (start_date + lower * delta):
                 return start_date + (lower + 1) * delta
             else:
                 return start_date + lower * delta
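
The noqa tweak above is about scope: a "# flake8: noqa" comment disables
checking for the whole file, whereas a trailing "# noqa: F401" suppresses
only the unused-import warning on that single line, which is all the doctest
re-export needs. A minimal sketch:

    # The file-wide switch (too broad for this case) would look like:
    #     # flake8: noqa
    #
    # Per-line, per-code suppression -- only F401 (imported but unused) here:
    from datetime import timedelta  # noqa: F401
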
diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py
index 185503e8e1..c35d6aacc9 100644
--- a/airflow/utils/helpers.py
+++ b/airflow/utils/helpers.py
@@ -170,7 +170,7 @@ def chain(*tasks):
 
 
 def cross_downstream(from_tasks, to_tasks):
-    """
+    r"""
     Set downstream dependencies for all tasks in from_tasks to all tasks in to_tasks.
     E.g.: cross_downstream(from_tasks=[t1, t2, t3], to_tasks=[t4, t5, t6])
     Is equivalent to:
diff --git a/airflow/utils/module_loading.py b/airflow/utils/module_loading.py
index 6e638b00d3..51c6adf4e6 100644
--- a/airflow/utils/module_loading.py
+++ b/airflow/utils/module_loading.py
@@ -48,7 +48,7 @@ def import_string(dotted_path):
 
     try:
         return getattr(module, class_name)
-    except AttributeError as err:
+    except AttributeError:
         raise ImportError('Module "{}" does not define a "{}" attribute/class'.format(
             module_path, class_name)
         )
diff --git a/airflow/utils/tests.py b/airflow/utils/tests.py
index 954ac35f78..6604bb1987 100644
--- a/airflow/utils/tests.py
+++ b/airflow/utils/tests.py
@@ -35,5 +35,5 @@ def skipUnlessImported(module, obj):
 
 def assertEqualIgnoreMultipleSpaces(case, first, second, msg=None):
     def _trim(s):
-        re.sub("\s+", " ", s.strip())
+        re.sub(r"\s+", " ", s.strip())
     return case.assertEqual(_trim(first), _trim(second), msg)
diff --git a/airflow/www/views.py b/airflow/www/views.py
index c3866c04c7..7e5344a7cc 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -476,7 +476,7 @@ def chart_data(self):
                         df[df.columns[x_col]])
                     df[df.columns[x_col]] = df[df.columns[x_col]].apply(
                         lambda x: int(x.strftime("%s")) * 1000)
-                except Exception as e:
+                except Exception:
                     payload['error'] = "Time conversion failed"
 
             if chart_type == 'datatable':
diff --git a/airflow/www_rbac/views.py b/airflow/www_rbac/views.py
index e91703ee4b..ae4e3a5a08 100644
--- a/airflow/www_rbac/views.py
+++ b/airflow/www_rbac/views.py
@@ -2238,7 +2238,7 @@ def action_set_failed(self, drs, session=None):
             flash(
                 "{count} dag runs and {altered_ti_count} task instances "
                 "were set to failed".format(**locals()))
-        except Exception as ex:
+        except Exception:
             flash('Failed to set state', 'error')
         return redirect(self.route_base + '/list')
 
@@ -2265,7 +2265,7 @@ def action_set_success(self, drs, session=None):
             flash(
                 "{count} dag runs and {altered_ti_count} task instances "
                 "were set to success".format(**locals()))
-        except Exception as ex:
+        except Exception:
             flash('Failed to set state', 'error')
         return redirect(self.route_base + '/list')
 
@@ -2373,7 +2373,7 @@ def set_task_instance_state(self, tis, target_state, session=None):
             session.commit()
             flash(
                 "{count} task instances were set to '{target_state}'".format(**locals()))
-        except Exception as ex:
+        except Exception:
             flash('Failed to set state', 'error')
 
     @action('set_running', "Set state to 'running'", '', single=False)
diff --git a/setup.py b/setup.py
index 410502c302..cc814456ae 100644
--- a/setup.py
+++ b/setup.py
@@ -262,7 +262,7 @@ def write_version(filename=os.path.join(*['airflow',
     'qds-sdk>=1.9.6',
     'rednose',
     'requests_mock',
-    'flake8==3.5.0'
+    'flake8>=3.6.0',
 ]
 
 if not PY3:
diff --git a/tests/contrib/executors/test_kubernetes_executor.py b/tests/contrib/executors/test_kubernetes_executor.py
index bd68562fef..e1bdd4d65e 100644
--- a/tests/contrib/executors/test_kubernetes_executor.py
+++ b/tests/contrib/executors/test_kubernetes_executor.py
@@ -56,7 +56,7 @@ def _cases(self):
 
     @staticmethod
     def _is_valid_name(name):
-        regex = "^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
+        regex = r"^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
         return (
             len(name) <= 253 and
             all(ch.lower() == ch for ch in name) and
diff --git a/tests/contrib/hooks/test_imap_hook.py b/tests/contrib/hooks/test_imap_hook.py
index 579fda3c79..2ed38286f1 100644
--- a/tests/contrib/hooks/test_imap_hook.py
+++ b/tests/contrib/hooks/test_imap_hook.py
@@ -100,7 +100,7 @@ def test_has_mail_attachments_with_regex_found(self, mock_imaplib):
 
         with ImapHook() as imap_hook:
             has_attachment_in_inbox = imap_hook.has_mail_attachment(
-                name='test(\d+).csv',
+                name=r'test(\d+).csv',
                 check_regex=True
             )
 
@@ -112,7 +112,7 @@ def test_has_mail_attachments_with_regex_not_found(self, mock_imaplib):
 
         with ImapHook() as imap_hook:
             has_attachment_in_inbox = imap_hook.has_mail_attachment(
-                name='test_(\d+).csv',
+                name=r'test_(\d+).csv',
                 check_regex=True
             )
 
@@ -142,7 +142,7 @@ def test_retrieve_mail_attachments_with_regex_found(self, mock_imaplib):
 
         with ImapHook() as imap_hook:
             attachments_in_inbox = imap_hook.retrieve_mail_attachments(
-                name='test(\d+).csv',
+                name=r'test(\d+).csv',
                 check_regex=True
             )
 
@@ -154,7 +154,7 @@ def test_retrieve_mail_attachments_with_regex_not_found(self, mock_imaplib):
 
         with ImapHook() as imap_hook:
             attachments_in_inbox = imap_hook.retrieve_mail_attachments(
-                name='test_(\d+).csv',
+                name=r'test_(\d+).csv',
                 check_regex=True
             )
 
@@ -201,7 +201,7 @@ def test_download_mail_attachments_with_regex_found(self, mock_imaplib, mock_ope
 
         with ImapHook() as imap_hook:
             imap_hook.download_mail_attachments(
-                name='test(\d+).csv',
+                name=r'test(\d+).csv',
                 local_output_directory='test_directory',
                 check_regex=True
             )
@@ -216,7 +216,7 @@ def test_download_mail_attachments_with_regex_not_found(self, mock_imaplib, mock
 
         with ImapHook() as imap_hook:
             imap_hook.download_mail_attachments(
-                name='test_(\d+).csv',
+                name=r'test_(\d+).csv',
                 local_output_directory='test_directory',
                 check_regex=True
             )
diff --git a/tests/contrib/minikube/test_kubernetes_executor.py b/tests/contrib/minikube/test_kubernetes_executor.py
index 58d28a6007..fad358205f 100644
--- a/tests/contrib/minikube/test_kubernetes_executor.py
+++ b/tests/contrib/minikube/test_kubernetes_executor.py
@@ -54,7 +54,7 @@ class KubernetesExecutorTest(unittest.TestCase):
     def _delete_airflow_pod():
         air_pod = check_output(['kubectl', 'get', 'pods']).decode()
         air_pod = air_pod.split('\n')
-        names = [re.compile('\s+').split(x)[0] for x in air_pod if 'airflow' in x]
+        names = [re.compile(r'\s+').split(x)[0] for x in air_pod if 'airflow' in x]
         if names:
             check_call(['kubectl', 'delete', 'pod', names[0]])
 
diff --git a/tests/contrib/operators/test_databricks_operator.py b/tests/contrib/operators/test_databricks_operator.py
index af62a3e4c3..56440cddf6 100644
--- a/tests/contrib/operators/test_databricks_operator.py
+++ b/tests/contrib/operators/test_databricks_operator.py
@@ -185,8 +185,8 @@ def test_init_with_bad_type(self):
             'test': datetime.now()
         }
         # Looks a bit weird since we have to escape regex reserved symbols.
-        exception_message = 'Type \<(type|class) \'datetime.datetime\'\> used ' + \
-                            'for parameter json\[test\] is not a number or a string'
+        exception_message = r'Type \<(type|class) \'datetime.datetime\'\> used ' + \
+                            r'for parameter json\[test\] is not a number or a string'
         with self.assertRaisesRegexp(AirflowException, exception_message):
             DatabricksSubmitRunOperator(task_id=TASK_ID, json=json)
 
@@ -352,8 +352,8 @@ def test_init_with_bad_type(self):
             'test': datetime.now()
         }
         # Looks a bit weird since we have to escape regex reserved symbols.
-        exception_message = 'Type \<(type|class) \'datetime.datetime\'\> used ' + \
-                            'for parameter json\[test\] is not a number or a string'
+        exception_message = r'Type \<(type|class) \'datetime.datetime\'\> used ' + \
+                            r'for parameter json\[test\] is not a number or a string'
         with self.assertRaisesRegexp(AirflowException, exception_message):
             DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=json)
 
diff --git a/tests/contrib/sensors/test_hdfs_sensor.py b/tests/contrib/sensors/test_hdfs_sensor.py
index f72d983447..b03b738686 100644
--- a/tests/contrib/sensors/test_hdfs_sensor.py
+++ b/tests/contrib/sensors/test_hdfs_sensor.py
@@ -236,7 +236,7 @@ def test_should_match_regex_but_copyingext(self):
         self.log.debug('#' * 10)
         self.log.debug('Running %s', self._testMethodName)
         self.log.debug('#' * 10)
-        compiled_regex = re.compile("copying_file_\d+.txt")
+        compiled_regex = re.compile(r"copying_file_\d+.txt")
         task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
                                filepath='/datadirectory/regex_dir',
                                regex=compiled_regex,
diff --git a/tests/core.py b/tests/core.py
index 9110c0138a..d231a8e1f7 100644
--- a/tests/core.py
+++ b/tests/core.py
@@ -1145,7 +1145,7 @@ def test_cli_connections_list(self):
                         new_callable=six.StringIO) as mock_stdout:
             cli.connections(self.parser.parse_args(['connections', '--list']))
             stdout = mock_stdout.getvalue()
-        conns = [[x.strip("'") for x in re.findall("'\w+'", line)[:2]]
+        conns = [[x.strip("'") for x in re.findall(r"'\w+'", line)[:2]]
                  for ii, line in enumerate(stdout.split('\n'))
                  if ii % 2 == 1]
         conns = [conn for conn in conns if len(conn) > 0]
diff --git a/tests/test_jobs.py b/tests/test_jobs.py
index 75deba44e9..a763b2ae3e 100644
--- a/tests/test_jobs.py
+++ b/tests/test_jobs.py
@@ -3159,7 +3159,7 @@ def test_dag_get_active_runs(self):
 
         try:
             running_date = running_dates[0]
-        except Exception as _:
+        except Exception:
             running_date = 'Except'
 
         self.assertEqual(execution_date, running_date, 'Running Date must match Execution Date')
diff --git a/tox.ini b/tox.ini
index 6065f9d072..491b054b32 100644
--- a/tox.ini
+++ b/tox.ini
@@ -67,6 +67,6 @@ commands =
 basepython = python3
 
 deps =
-    flake8==3.5.0
+    flake8>=3.6.0
 
 commands = flake8


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services