Posted to commits@airflow.apache.org by ka...@apache.org on 2020/08/25 08:51:11 UTC

[airflow] branch master updated: PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new d760265  PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)
d760265 is described below

commit d7602654526fdd2876466371404784bd17cfe0d2
Author: Kaxil Naik <ka...@gmail.com>
AuthorDate: Tue Aug 25 09:50:21 2020 +0100

    PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)
---
 .pre-commit-config.yaml                            |  2 +-
 airflow/api/auth/backend/kerberos_auth.py          |  2 +-
 airflow/api_connexion/schemas/config_schema.py     | 12 ++++----
 airflow/api_connexion/schemas/connection_schema.py |  6 ++--
 airflow/api_connexion/schemas/dag_run_schema.py    |  8 +++---
 airflow/api_connexion/schemas/enum_schemas.py      |  2 +-
 airflow/api_connexion/schemas/error_schema.py      |  2 +-
 airflow/api_connexion/schemas/event_log_schema.py  |  8 +++---
 airflow/api_connexion/schemas/health_schema.py     |  4 +--
 airflow/api_connexion/schemas/log_schema.py        |  4 +--
 airflow/api_connexion/schemas/variable_schema.py   |  4 +--
 airflow/api_connexion/schemas/xcom_schema.py       |  6 ++--
 airflow/cli/commands/legacy_commands.py            |  2 +-
 airflow/models/base.py                             |  4 +--
 airflow/models/baseoperator.py                     |  2 +-
 airflow/models/connection.py                       |  2 +-
 airflow/models/dagrun.py                           |  2 +-
 airflow/models/pool.py                             |  2 +-
 airflow/models/renderedtifields.py                 |  2 +-
 airflow/models/skipmixin.py                        |  3 +-
 airflow/models/taskinstance.py                     |  2 +-
 airflow/models/variable.py                         |  2 +-
 airflow/providers/apache/hive/hooks/hive.py        |  2 +-
 .../providers/apache/spark/operators/spark_sql.py  |  2 +-
 .../providers/elasticsearch/log/es_task_handler.py |  2 +-
 airflow/providers/facebook/ads/hooks/ads.py        |  2 +-
 airflow/providers/google/cloud/hooks/bigquery.py   | 32 +++++++++++-----------
 airflow/providers/google/cloud/hooks/kms.py        |  4 +--
 airflow/settings.py                                | 14 +++++-----
 airflow/stats.py                                   |  2 +-
 airflow/utils/log/log_reader.py                    |  2 +-
 airflow/www/api/experimental/endpoints.py          | 10 +++----
 airflow/www/utils.py                               |  2 +-
 airflow/www/views.py                               |  2 +-
 dev/airflow-pr                                     |  2 +-
 docs/exts/docroles.py                              |  2 +-
 tests/dags/test_task_view_type_check.py            |  4 +--
 tests/operators/test_sql.py                        | 26 +++++++++---------
 tests/providers/jenkins/hooks/test_jenkins.py      |  4 +--
 tests/providers/slack/hooks/test_slack.py          | 12 ++++----
 tests/providers/yandex/hooks/test_yandex.py        |  4 +--
 tests/serialization/test_dag_serialization.py      |  2 +-
 42 files changed, 107 insertions(+), 108 deletions(-)
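
For context, D210 is the pydocstyle check named in the commit title: it rejects whitespace immediately inside a docstring's triple quotes. A minimal sketch of the rule (the class name is illustrative, not taken from the diff below):

    # Rejected by pydocstyle D210: spaces surround the docstring text
    class ExampleSchema:
        """ Example Schema """


    # Accepted by D210: the text sits flush against the quotes
    class ExampleSchema:
        """Example Schema"""

The one-line .pre-commit-config.yaml change below drops D210 from the hook's --add-ignore list, so pre-commit now enforces the rule; the rest of the diff is the cleanup that required. Note that the hook's exclude pattern skips tests/, scripts/, dev/ and a few other paths, which is why several hunks under tests/ remove only the leading space and still leave a trailing one. Assuming the hook id is pydocstyle, the check can be re-run locally with: pre-commit run pydocstyle --all-files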

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 93469c5..f15fa94 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -192,7 +192,7 @@ repos:
         name: Run pydocstyle
         args:
           - --convention=pep257
-          - --add-ignore=D100,D102,D104,D105,D106,D107,D200,D202,D204,D205,D207,D208,D210,D400,D401
+          - --add-ignore=D100,D102,D104,D105,D106,D107,D200,D202,D204,D205,D207,D208,D400,D401
         exclude: ^tests/.*\.py$|^scripts/.*\.py$|^dev|^backport_packages|^kubernetes_tests
   - repo: local
     hooks:
diff --git a/airflow/api/auth/backend/kerberos_auth.py b/airflow/api/auth/backend/kerberos_auth.py
index ea9afa7..dbb54d5 100644
--- a/airflow/api/auth/backend/kerberos_auth.py
+++ b/airflow/api/auth/backend/kerberos_auth.py
@@ -60,7 +60,7 @@ CLIENT_AUTH: Optional[Union[Tuple[str, str], AuthBase]] = HTTPKerberosAuth(servi
 
 
 class KerberosService:  # pylint: disable=too-few-public-methods
-    """Class to keep information about the Kerberos Service initialized """
+    """Class to keep information about the Kerberos Service initialized"""
     def __init__(self):
         self.service_name = None
 
diff --git a/airflow/api_connexion/schemas/config_schema.py b/airflow/api_connexion/schemas/config_schema.py
index e1665a7..bb7be9c 100644
--- a/airflow/api_connexion/schemas/config_schema.py
+++ b/airflow/api_connexion/schemas/config_schema.py
@@ -21,36 +21,36 @@ from marshmallow import Schema, fields
 
 
 class ConfigOptionSchema(Schema):
-    """ Config Option Schema """
+    """Config Option Schema"""
     key = fields.String(required=True)
     value = fields.String(required=True)
 
 
 class ConfigOption(NamedTuple):
-    """ Config option """
+    """Config option"""
     key: str
     value: str
 
 
 class ConfigSectionSchema(Schema):
-    """ Config Section Schema"""
+    """Config Section Schema"""
     name = fields.String(required=True)
     options = fields.List(fields.Nested(ConfigOptionSchema))
 
 
 class ConfigSection(NamedTuple):
-    """ List of config options within a section """
+    """List of config options within a section"""
     name: str
     options: List[ConfigOption]
 
 
 class ConfigSchema(Schema):
-    """ Config Schema"""
+    """Config Schema"""
     sections = fields.List(fields.Nested(ConfigSectionSchema))
 
 
 class Config(NamedTuple):
-    """ List of config sections with their options """
+    """List of config sections with their options"""
     sections: List[ConfigSection]
 
 
diff --git a/airflow/api_connexion/schemas/connection_schema.py b/airflow/api_connexion/schemas/connection_schema.py
index 9c2eba3..e1d0a78 100644
--- a/airflow/api_connexion/schemas/connection_schema.py
+++ b/airflow/api_connexion/schemas/connection_schema.py
@@ -29,7 +29,7 @@ class ConnectionCollectionItemSchema(SQLAlchemySchema):
     """
 
     class Meta:
-        """ Meta """
+        """Meta"""
         model = Connection
 
     connection_id = auto_field('conn_id', required=True)
@@ -50,13 +50,13 @@ class ConnectionSchema(ConnectionCollectionItemSchema):  # pylint: disable=too-m
 
 
 class ConnectionCollection(NamedTuple):
-    """ List of Connections with meta"""
+    """List of Connections with meta"""
     connections: List[Connection]
     total_entries: int
 
 
 class ConnectionCollectionSchema(Schema):
-    """ Connection Collection Schema"""
+    """Connection Collection Schema"""
     connections = fields.List(fields.Nested(ConnectionCollectionItemSchema))
     total_entries = fields.Int()
 
diff --git a/airflow/api_connexion/schemas/dag_run_schema.py b/airflow/api_connexion/schemas/dag_run_schema.py
index cef827a..f13d3d7 100644
--- a/airflow/api_connexion/schemas/dag_run_schema.py
+++ b/airflow/api_connexion/schemas/dag_run_schema.py
@@ -29,7 +29,7 @@ from airflow.utils.types import DagRunType
 
 
 class ConfObject(fields.Field):
-    """ The conf field"""
+    """The conf field"""
     def _serialize(self, value, attr, obj, **kwargs):
         if not value:
             return {}
@@ -47,7 +47,7 @@ class DAGRunSchema(SQLAlchemySchema):
     """
 
     class Meta:
-        """ Meta """
+        """Meta"""
 
         model = DagRun
         dateformat = "iso"
@@ -90,10 +90,10 @@ class DAGRunCollectionSchema(Schema):
 
 
 class DagRunsBatchFormSchema(Schema):
-    """ Schema to validate and deserialize the Form(request payload) submitted to DagRun Batch endpoint"""
+    """Schema to validate and deserialize the Form(request payload) submitted to DagRun Batch endpoint"""
 
     class Meta:
-        """ Meta """
+        """Meta"""
         datetimeformat = 'iso'
         strict = True
 
diff --git a/airflow/api_connexion/schemas/enum_schemas.py b/airflow/api_connexion/schemas/enum_schemas.py
index f0a38cd..8e7280a 100644
--- a/airflow/api_connexion/schemas/enum_schemas.py
+++ b/airflow/api_connexion/schemas/enum_schemas.py
@@ -21,7 +21,7 @@ from airflow.utils.state import State
 
 
 class DagStateField(fields.String):
-    """ Schema for DagState Enum"""
+    """Schema for DagState Enum"""
     def __init__(self, **metadata):
         super().__init__(**metadata)
         self.validators = (
diff --git a/airflow/api_connexion/schemas/error_schema.py b/airflow/api_connexion/schemas/error_schema.py
index 3fee9a9..d08ce51 100644
--- a/airflow/api_connexion/schemas/error_schema.py
+++ b/airflow/api_connexion/schemas/error_schema.py
@@ -44,7 +44,7 @@ class ImportErrorCollection(NamedTuple):
 
 
 class ImportErrorCollectionSchema(Schema):
-    """ Import error collection schema """
+    """Import error collection schema"""
 
     import_errors = fields.List(fields.Nested(ImportErrorSchema))
     total_entries = fields.Int()
diff --git a/airflow/api_connexion/schemas/event_log_schema.py b/airflow/api_connexion/schemas/event_log_schema.py
index 0753a8a..c0b1bb2 100644
--- a/airflow/api_connexion/schemas/event_log_schema.py
+++ b/airflow/api_connexion/schemas/event_log_schema.py
@@ -24,10 +24,10 @@ from airflow.models.log import Log
 
 
 class EventLogSchema(SQLAlchemySchema):
-    """ Event log schema """
+    """Event log schema"""
 
     class Meta:
-        """ Meta """
+        """Meta"""
         model = Log
 
     id = auto_field(data_key='event_log_id', dump_only=True)
@@ -41,13 +41,13 @@ class EventLogSchema(SQLAlchemySchema):
 
 
 class EventLogCollection(NamedTuple):
-    """ List of import errors with metadata """
+    """List of import errors with metadata"""
     event_logs: List[Log]
     total_entries: int
 
 
 class EventLogCollectionSchema(Schema):
-    """ EventLog Collection Schema """
+    """EventLog Collection Schema"""
 
     event_logs = fields.List(fields.Nested(EventLogSchema))
     total_entries = fields.Int()
diff --git a/airflow/api_connexion/schemas/health_schema.py b/airflow/api_connexion/schemas/health_schema.py
index c0cbebb..bccfc0a 100644
--- a/airflow/api_connexion/schemas/health_schema.py
+++ b/airflow/api_connexion/schemas/health_schema.py
@@ -28,12 +28,12 @@ class MetaDatabaseInfoSchema(BaseInfoSchema):
 
 
 class SchedulerInfoSchema(BaseInfoSchema):
-    """ Schema for Metadatabase info"""
+    """Schema for Metadatabase info"""
     latest_scheduler_heartbeat = fields.String(dump_only=True)
 
 
 class HeathInfoSchema(Schema):
-    """ Schema for the Health endpoint """
+    """Schema for the Health endpoint"""
 
     metadatabase = fields.Nested(MetaDatabaseInfoSchema)
     scheduler = fields.Nested(SchedulerInfoSchema)
diff --git a/airflow/api_connexion/schemas/log_schema.py b/airflow/api_connexion/schemas/log_schema.py
index e59416f..b9b7817 100644
--- a/airflow/api_connexion/schemas/log_schema.py
+++ b/airflow/api_connexion/schemas/log_schema.py
@@ -20,14 +20,14 @@ from marshmallow import Schema, fields
 
 
 class LogsSchema(Schema):
-    """ Schema for logs """
+    """Schema for logs"""
 
     content = fields.Str()
     continuation_token = fields.Str()
 
 
 class LogResponseObject(NamedTuple):
-    """ Log Response Object """
+    """Log Response Object"""
     content: str
     continuation_token: str
 
diff --git a/airflow/api_connexion/schemas/variable_schema.py b/airflow/api_connexion/schemas/variable_schema.py
index c0c7bd1..4c73f8c 100644
--- a/airflow/api_connexion/schemas/variable_schema.py
+++ b/airflow/api_connexion/schemas/variable_schema.py
@@ -19,13 +19,13 @@ from marshmallow import Schema, fields
 
 
 class VariableSchema(Schema):
-    """ Variable Schema """
+    """Variable Schema"""
     key = fields.String(required=True)
     value = fields.String(attribute="val", required=True)
 
 
 class VariableCollectionSchema(Schema):
-    """ Variable Collection Schema """
+    """Variable Collection Schema"""
     variables = fields.List(fields.Nested(VariableSchema))
     total_entries = fields.Int()
 
diff --git a/airflow/api_connexion/schemas/xcom_schema.py b/airflow/api_connexion/schemas/xcom_schema.py
index 9d06c1b..2687325 100644
--- a/airflow/api_connexion/schemas/xcom_schema.py
+++ b/airflow/api_connexion/schemas/xcom_schema.py
@@ -28,7 +28,7 @@ class XComCollectionItemSchema(SQLAlchemySchema):
     """
 
     class Meta:
-        """ Meta """
+        """Meta"""
         model = XCom
 
     key = auto_field()
@@ -47,13 +47,13 @@ class XComSchema(XComCollectionItemSchema):
 
 
 class XComCollection(NamedTuple):
-    """ List of XComs with meta"""
+    """List of XComs with meta"""
     xcom_entries: List[XCom]
     total_entries: int
 
 
 class XComCollectionSchema(Schema):
-    """ XCom Collection Schema"""
+    """XCom Collection Schema"""
     xcom_entries = fields.List(fields.Nested(XComCollectionItemSchema))
     total_entries = fields.Int()
 
diff --git a/airflow/cli/commands/legacy_commands.py b/airflow/cli/commands/legacy_commands.py
index b2ca64a..aa0789c 100644
--- a/airflow/cli/commands/legacy_commands.py
+++ b/airflow/cli/commands/legacy_commands.py
@@ -49,7 +49,7 @@ COMMAND_MAP = {
 
 
 def check_legacy_command(action, value):
-    """ Checks command value and raise error if value is in removed command """
+    """Checks command value and raise error if value is in removed command"""
     new_command = COMMAND_MAP.get(value)
     if new_command is not None:
         msg = f"`airflow {value}` command, has been removed, please use `airflow {new_command}`"
diff --git a/airflow/models/base.py b/airflow/models/base.py
index 9774de0..dc19534 100644
--- a/airflow/models/base.py
+++ b/airflow/models/base.py
@@ -37,11 +37,11 @@ ID_LEN = 250
 
 # used for typing
 class Operator:
-    """ Class just used for Typing """
+    """Class just used for Typing"""
 
 
 def get_id_collation_args():
-    """ Get SQLAlchemy args to use for COLLATION """
+    """Get SQLAlchemy args to use for COLLATION"""
     collation = conf.get('core', 'sql_engine_collation_for_ids', fallback=None)
     if collation:
         return {'collation': collation}
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index 5deeadc..352ebcd 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -1299,7 +1299,7 @@ class BaseOperator(Operator, LoggingMixin, metaclass=BaseOperatorMeta):
 
     @cached_property
     def extra_links(self) -> List[str]:
-        """@property: extra links for the task. """
+        """@property: extra links for the task"""
         return list(set(self.operator_extra_link_dict.keys())
                     .union(self.global_operator_extra_link_dict.keys()))
 
diff --git a/airflow/models/connection.py b/airflow/models/connection.py
index 17658f7..4bedc22 100644
--- a/airflow/models/connection.py
+++ b/airflow/models/connection.py
@@ -308,7 +308,7 @@ class Connection(Base, LoggingMixin):
                        descriptor=property(cls.get_extra, cls.set_extra))
 
     def rotate_fernet_key(self):
-        """Encrypts data with a new key. See: :ref:`security/fernet`. """
+        """Encrypts data with a new key. See: :ref:`security/fernet`"""
         fernet = get_fernet()
         if self._password and self.is_encrypted:
             self._password = fernet.rotate(self._password.encode('utf-8')).decode()
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index 736da96..ff043a6 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -523,7 +523,7 @@ class DagRun(Base, LoggingMixin):
     @classmethod
     @provide_session
     def get_latest_runs(cls, session=None):
-        """Returns the latest DagRun for each DAG. """
+        """Returns the latest DagRun for each DAG"""
         subquery = (
             session
             .query(
diff --git a/airflow/models/pool.py b/airflow/models/pool.py
index f351fd5..2a61a75 100644
--- a/airflow/models/pool.py
+++ b/airflow/models/pool.py
@@ -30,7 +30,7 @@ from airflow.utils.state import State
 
 
 class PoolStats(TypedDict):
-    """ Dictionary containing Pool Stats """
+    """Dictionary containing Pool Stats"""
     total: int
     running: int
     queued: int
diff --git a/airflow/models/renderedtifields.py b/airflow/models/renderedtifields.py
index e4f1f00..515fb68 100644
--- a/airflow/models/renderedtifields.py
+++ b/airflow/models/renderedtifields.py
@@ -15,7 +15,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""Save Rendered Template Fields """
+"""Save Rendered Template Fields"""
 from typing import Optional
 
 import sqlalchemy_jsonfield
diff --git a/airflow/models/skipmixin.py b/airflow/models/skipmixin.py
index 0ad2eb8..ba911da 100644
--- a/airflow/models/skipmixin.py
+++ b/airflow/models/skipmixin.py
@@ -35,8 +35,7 @@ XCOM_SKIPMIXIN_FOLLOWED = "followed"
 
 
 class SkipMixin(LoggingMixin):
-    """ A Mixin to skip Tasks Instances """
-
+    """A Mixin to skip Tasks Instances"""
     def _set_state_to_skipped(self, dag_run, execution_date, tasks, session):
         """
         Used internally to set state of task instances to skipped from the same dag run.
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index e0c465e..0165614 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -268,7 +268,7 @@ class TaskInstance(Base, LoggingMixin):     # pylint: disable=R0902,R0904
 
     @reconstructor
     def init_on_load(self):
-        """ Initialize the attributes that aren't stored in the DB. """
+        """Initialize the attributes that aren't stored in the DB"""
         self.test_mode = False  # can be changed when calling 'run'
 
     @property
diff --git a/airflow/models/variable.py b/airflow/models/variable.py
index a306dcf..a9222c4 100644
--- a/airflow/models/variable.py
+++ b/airflow/models/variable.py
@@ -177,7 +177,7 @@ class Variable(Base, LoggingMixin):
         return session.query(cls).filter(cls.key == key).delete()
 
     def rotate_fernet_key(self):
-        """ Rotate Fernet Key """
+        """Rotate Fernet Key"""
         fernet = get_fernet()
         if self._val and self.is_encrypted:
             self._val = fernet.rotate(self._val.encode('utf-8')).decode()
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index 8e577a3..677ba3f 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -491,7 +491,7 @@ class HiveCliHook(BaseHook):
 
 
 class HiveMetastoreHook(BaseHook):
-    """ Wrapper to interact with the Hive Metastore"""
+    """Wrapper to interact with the Hive Metastore"""
 
     # java short max val
     MAX_PART_COUNT = 32767
diff --git a/airflow/providers/apache/spark/operators/spark_sql.py b/airflow/providers/apache/spark/operators/spark_sql.py
index 59a1001..fec7f6f 100644
--- a/airflow/providers/apache/spark/operators/spark_sql.py
+++ b/airflow/providers/apache/spark/operators/spark_sql.py
@@ -109,7 +109,7 @@ class SparkSqlOperator(BaseOperator):
         self._hook.kill()
 
     def _get_hook(self) -> SparkSqlHook:
-        """ Get SparkSqlHook """
+        """Get SparkSqlHook"""
         return SparkSqlHook(sql=self._sql,
                             conf=self._conf,
                             conn_id=self._conn_id,
diff --git a/airflow/providers/elasticsearch/log/es_task_handler.py b/airflow/providers/elasticsearch/log/es_task_handler.py
index 44432b9..6ff2ef3 100644
--- a/airflow/providers/elasticsearch/log/es_task_handler.py
+++ b/airflow/providers/elasticsearch/log/es_task_handler.py
@@ -281,7 +281,7 @@ class ElasticsearchTaskHandler(FileTaskHandler, LoggingMixin):
 
     @property
     def log_name(self) -> str:
-        """ The log name"""
+        """The log name"""
         return self.LOG_NAME
 
     def get_external_log_url(self, task_instance: TaskInstance, try_number: int) -> str:
diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/airflow/providers/facebook/ads/hooks/ads.py
index c8f01ab..1318cb2 100644
--- a/airflow/providers/facebook/ads/hooks/ads.py
+++ b/airflow/providers/facebook/ads/hooks/ads.py
@@ -72,7 +72,7 @@ class FacebookAdsReportingHook(BaseHook):
                                        "account_id"]
 
     def _get_service(self) -> FacebookAdsApi:
-        """ Returns Facebook Ads Client using a service account"""
+        """Returns Facebook Ads Client using a service account"""
         config = self.facebook_ads_config
         return FacebookAdsApi.init(app_id=config["app_id"],
                                    app_secret=config["app_secret"],
diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py
index 59885f2..6133c11 100644
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -2240,17 +2240,17 @@ class BigQueryConnection:
         self._kwargs = kwargs
 
     def close(self) -> None:   # noqa: D403
-        """BigQueryConnection does not have anything to close. """
+        """BigQueryConnection does not have anything to close"""
 
     def commit(self) -> None:   # noqa: D403
-        """BigQueryConnection does not support transactions. """
+        """BigQueryConnection does not support transactions"""
 
     def cursor(self) -> "BigQueryCursor":   # noqa: D403
-        """Return a new :py:class:`Cursor` object using the connection. """
+        """Return a new :py:class:`Cursor` object using the connection"""
         return BigQueryCursor(*self._args, **self._kwargs)
 
     def rollback(self) -> NoReturn:   # noqa: D403
-        """BigQueryConnection does not have transactions """
+        """BigQueryConnection does not have transactions"""
         raise NotImplementedError(
             "BigQueryConnection does not have transactions")
 
@@ -2584,15 +2584,15 @@ class BigQueryCursor(BigQueryBaseCursor):
 
     @property
     def description(self) -> NoReturn:
-        """ The schema description method is not currently implemented. """
+        """The schema description method is not currently implemented"""
         raise NotImplementedError
 
     def close(self) -> None:
-        """ By default, do nothing """
+        """By default, do nothing"""
 
     @property
     def rowcount(self) -> int:
-        """ By default, return -1 to indicate that this is not supported. """
+        """By default, return -1 to indicate that this is not supported"""
         return -1
 
     def execute(self, operation: str, parameters: Optional[Dict] = None) -> None:
@@ -2623,14 +2623,14 @@ class BigQueryCursor(BigQueryBaseCursor):
             self.execute(operation, parameters)
 
     def flush_results(self) -> None:
-        """ Flush results related cursor attributes. """
+        """Flush results related cursor attributes"""
         self.page_token = None
         self.job_id = None
         self.all_pages_loaded = False
         self.buffer = []
 
     def fetchone(self) -> Union[List, None]:
-        """ Fetch the next row of a query result set. """
+        """Fetch the next row of a query result set"""
         # pylint: disable=not-callable
         return self.next()
 
@@ -2712,24 +2712,24 @@ class BigQueryCursor(BigQueryBaseCursor):
         return result
 
     def get_arraysize(self) -> int:
-        """ Specifies the number of rows to fetch at a time with .fetchmany() """
+        """Specifies the number of rows to fetch at a time with .fetchmany()"""
         return self.buffersize or 1
 
     def set_arraysize(self, arraysize: int) -> None:
-        """ Specifies the number of rows to fetch at a time with .fetchmany() """
+        """Specifies the number of rows to fetch at a time with .fetchmany()"""
         self.buffersize = arraysize
 
     arraysize = property(get_arraysize, set_arraysize)
 
     def setinputsizes(self, sizes: Any) -> None:
-        """ Does nothing by default """
+        """Does nothing by default"""
 
     def setoutputsize(self, size: Any, column: Any = None) -> None:
-        """ Does nothing by default """
+        """Does nothing by default"""
 
 
 def _bind_parameters(operation: str, parameters: Dict) -> str:
-    """ Helper method that binds parameters to a SQL query. """
+    """Helper method that binds parameters to a SQL query"""
     # inspired by MySQL Python Connector (conversion.py)
     string_parameters = {}  # type Dict[str, str]
     for (name, value) in parameters.items():
@@ -2743,7 +2743,7 @@ def _bind_parameters(operation: str, parameters: Dict) -> str:
 
 
 def _escape(s: str) -> str:
-    """ Helper method that escapes parameters to a SQL query. """
+    """Helper method that escapes parameters to a SQL query"""
     e = s
     e = e.replace('\\', '\\\\')
     e = e.replace('\n', '\\n')
@@ -2853,7 +2853,7 @@ def _cleanse_time_partitioning(
 
 
 def _validate_value(key: Any, value: Any, expected_type: Type) -> None:
-    """ Function to check expected type and raise error if type is not correct. """
+    """Function to check expected type and raise error if type is not correct"""
     if not isinstance(value, expected_type):
         raise TypeError("{} argument must have a type {} not {}".format(
             key, expected_type, type(value)))
diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py
index 2ab10f6..1ecc706 100644
--- a/airflow/providers/google/cloud/hooks/kms.py
+++ b/airflow/providers/google/cloud/hooks/kms.py
@@ -31,12 +31,12 @@ from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 
 
 def _b64encode(s: bytes) -> str:
-    """ Base 64 encodes a bytes object to a string """
+    """Base 64 encodes a bytes object to a string"""
     return base64.b64encode(s).decode("ascii")
 
 
 def _b64decode(s: str) -> bytes:
-    """ Base 64 decodes a string to bytes. """
+    """Base 64 decodes a string to bytes"""
     return base64.b64decode(s.encode("utf-8"))
 
 
diff --git a/airflow/settings.py b/airflow/settings.py
index 4ab6738..f6a9c4a 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -139,7 +139,7 @@ def pod_mutation_hook(pod):  # pylint: disable=unused-argument
 
 # pylint: disable=global-statement
 def configure_vars():
-    """ Configure Global Variables from airflow.cfg"""
+    """Configure Global Variables from airflow.cfg"""
     global SQL_ALCHEMY_CONN
     global DAGS_FOLDER
     global PLUGINS_FOLDER
@@ -154,7 +154,7 @@ def configure_vars():
 
 
 def configure_orm(disable_connection_pool=False):
-    """ Configure ORM using SQLAlchemy"""
+    """Configure ORM using SQLAlchemy"""
     log.debug("Setting up DB connection pool (PID %s)", os.getpid())
     global engine
     global Session
@@ -222,7 +222,7 @@ def configure_orm(disable_connection_pool=False):
 
 
 def dispose_orm():
-    """ Properly close pooled database connections """
+    """Properly close pooled database connections"""
     log.debug("Disposing DB connection pool (PID %s)", os.getpid())
     global engine
     global Session
@@ -236,7 +236,7 @@ def dispose_orm():
 
 
 def configure_adapters():
-    """ Register Adapters and DB Converters """
+    """Register Adapters and DB Converters"""
     from pendulum import DateTime as Pendulum
     try:
         from sqlite3 import register_adapter
@@ -256,7 +256,7 @@ def configure_adapters():
 
 
 def validate_session():
-    """ Validate ORM Session """
+    """Validate ORM Session"""
     worker_precheck = conf.getboolean('core', 'worker_precheck', fallback=False)
     if not worker_precheck:
         return True
@@ -300,7 +300,7 @@ def prepare_syspath():
 
 
 def import_local_settings():
-    """ Import airflow_local_settings.py files to allow overriding any configs in settings.py file """
+    """Import airflow_local_settings.py files to allow overriding any configs in settings.py file"""
     try:  # pylint: disable=too-many-nested-blocks
         import airflow_local_settings
 
@@ -318,7 +318,7 @@ def import_local_settings():
 
 
 def initialize():
-    """ Initialize Airflow with all the settings from this file """
+    """Initialize Airflow with all the settings from this file"""
     configure_vars()
     prepare_syspath()
     import_local_settings()
diff --git a/airflow/stats.py b/airflow/stats.py
index f2abc01..026a308 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -127,7 +127,7 @@ class AllowListValidator:
             self.allow_list = None
 
     def test(self, stat):
-        """ Test if stat is in the Allow List """
+        """Test if stat is in the Allow List"""
         if self.allow_list is not None:
             return stat.strip().lower().startswith(self.allow_list)
         else:
diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py
index 6aab9e6..dbfe476 100644
--- a/airflow/utils/log/log_reader.py
+++ b/airflow/utils/log/log_reader.py
@@ -27,7 +27,7 @@ from airflow.utils.log.logging_mixin import ExternalLoggingMixin
 
 
 class TaskLogReader:
-    """ Task log reader"""
+    """Task log reader"""
 
     def read_log_chunks(self, ti: TaskInstance, try_number: Optional[int],
                         metadata) -> Tuple[List[str], Dict[str, Any]]:
diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py
index 023ab72..6805ed7 100644
--- a/airflow/www/api/experimental/endpoints.py
+++ b/airflow/www/api/experimental/endpoints.py
@@ -171,14 +171,14 @@ def dag_runs(dag_id):
 @api_experimental.route('/test', methods=['GET'])
 @requires_authentication
 def test():
-    """ Test endpoint to check authentication """
+    """Test endpoint to check authentication"""
     return jsonify(status='OK')
 
 
 @api_experimental.route('/info', methods=['GET'])
 @requires_authentication
 def info():
-    """ Get Airflow Version """
+    """Get Airflow Version"""
     return jsonify(version=version)
 
 
@@ -198,7 +198,7 @@ def get_dag_code(dag_id):
 @api_experimental.route('/dags/<string:dag_id>/tasks/<string:task_id>', methods=['GET'])
 @requires_authentication
 def task_info(dag_id, task_id):
-    """Returns a JSON with a task's public instance variables. """
+    """Returns a JSON with a task's public instance variables"""
     try:
         t_info = get_task(dag_id, task_id)
     except AirflowException as err:
@@ -320,7 +320,7 @@ def dag_run_status(dag_id, execution_date):
 @api_experimental.route('/latest_runs', methods=['GET'])
 @requires_authentication
 def latest_dag_runs():
-    """Returns the latest DagRun for each DAG formatted for the UI. """
+    """Returns the latest DagRun for each DAG formatted for the UI"""
     from airflow.models import DagRun
     dagruns = DagRun.get_latest_runs()
     payload = []
@@ -402,7 +402,7 @@ def delete_pool(name):
                         methods=['GET'])
 @requires_authentication
 def get_lineage(dag_id: str, execution_date: str):
-    """ Get Lineage details for a DagRun """
+    """Get Lineage details for a DagRun"""
     # Convert string datetime into actual datetime
     try:
         execution_dt = timezone.parse(execution_date)
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index f81de13..efcb518 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -57,7 +57,7 @@ def get_sensitive_variables_fields():
 
 
 def should_hide_value_for_key(key_name):
-    """Returns True if hide_sensitive_variable_fields is True, else False """
+    """Returns True if hide_sensitive_variable_fields is True, else False"""
     # It is possible via importing variables from file that a key is empty.
     if key_name:
         config_set = conf.getboolean('admin', 'hide_sensitive_variable_fields')
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 129c0ed..260af26 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -96,7 +96,7 @@ def get_safe_url(url):
 
 
 def get_date_time_num_runs_dag_runs_form_data(www_request, session, dag):
-    """Get Execution Data, Base Date & Number of runs from a Request """
+    """Get Execution Data, Base Date & Number of runs from a Request"""
     date_time = www_request.args.get('execution_date')
     if date_time:
         date_time = timezone.parse(date_time)
diff --git a/dev/airflow-pr b/dev/airflow-pr
index ef24836..916604c2 100755
--- a/dev/airflow-pr
+++ b/dev/airflow-pr
@@ -460,7 +460,7 @@ def fix_version_from_branch(branch, versions):
 
 
 def register(username, password):
-    """ Use this function to register a JIRA account in your OS' keyring """
+    """Use this function to register a JIRA account in your OS' keyring """
     keyring.set_password('airflow-pr', 'username', username)
     keyring.set_password('airflow-pr', 'password', password)
 
diff --git a/docs/exts/docroles.py b/docs/exts/docroles.py
index ee10d4e..58346ab 100644
--- a/docs/exts/docroles.py
+++ b/docs/exts/docroles.py
@@ -25,7 +25,7 @@ from sphinx.ext.autodoc.importer import import_module, mock
 
 
 class RoleException(Exception):
-    """Exception for roles extension """
+    """Exception for roles extension"""
 
 
 def get_template_field(env, fullname):
diff --git a/tests/dags/test_task_view_type_check.py b/tests/dags/test_task_view_type_check.py
index fa04b5e..6f4585b 100644
--- a/tests/dags/test_task_view_type_check.py
+++ b/tests/dags/test_task_view_type_check.py
@@ -38,11 +38,11 @@ class CallableClass:
     Class that is callable.
     """
     def __call__(self):
-        """ A __call__ method """
+        """A __call__ method """
 
 
 def a_function(_, __):
-    """ A function with two args """
+    """A function with two args """
 
 
 partial_function = functools.partial(a_function, arg_x=1)
diff --git a/tests/operators/test_sql.py b/tests/operators/test_sql.py
index 5720847..5e07616 100644
--- a/tests/operators/test_sql.py
+++ b/tests/operators/test_sql.py
@@ -356,7 +356,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
             session.query(TI).delete()
 
     def test_unsupported_conn_type(self):
-        """ Check if BranchSQLOperator throws an exception for unsupported connection type """
+        """Check if BranchSQLOperator throws an exception for unsupported connection type """
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="redis_default",
@@ -371,7 +371,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
                    end_date=DEFAULT_DATE, ignore_ti_state=True)
 
     def test_invalid_conn(self):
-        """ Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection """
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -386,7 +386,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
                    end_date=DEFAULT_DATE, ignore_ti_state=True)
 
     def test_invalid_follow_task_true(self):
-        """ Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection """
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -401,7 +401,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
                    end_date=DEFAULT_DATE, ignore_ti_state=True)
 
     def test_invalid_follow_task_false(self):
-        """ Check if BranchSQLOperator throws an exception for invalid connection """
+        """Check if BranchSQLOperator throws an exception for invalid connection """
         op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="invalid_connection",
@@ -417,7 +417,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @pytest.mark.backend("mysql")
     def test_sql_branch_operator_mysql(self):
-        """ Check if BranchSQLOperator works with backend """
+        """Check if BranchSQLOperator works with backend """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -432,7 +432,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @pytest.mark.backend("postgres")
     def test_sql_branch_operator_postgres(self):
-        """ Check if BranchSQLOperator works with backend """
+        """Check if BranchSQLOperator works with backend """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="postgres_default",
@@ -447,7 +447,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_branch_single_value_with_dag_run(self, mock_hook):
-        """ Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -490,7 +490,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_branch_true_with_dag_run(self, mock_hook):
-        """ Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -534,7 +534,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_branch_false_with_dag_run(self, mock_hook):
-        """ Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -578,7 +578,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_branch_list_with_dag_run(self, mock_hook):
-        """ Checks if the BranchSQLOperator supports branching off to a list of tasks."""
+        """Checks if the BranchSQLOperator supports branching off to a list of tasks."""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -624,7 +624,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_invalid_query_result_with_dag_run(self, mock_hook):
-        """ Check BranchSQLOperator branch operation """
+        """Check BranchSQLOperator branch operation """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -657,7 +657,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_with_skip_in_branch_downstream_dependencies(self, mock_hook):
-        """ Test SQL Branch with skipping all downstream dependencies """
+        """Test SQL Branch with skipping all downstream dependencies """
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
@@ -701,7 +701,7 @@ class TestSqlBranch(TestHiveEnvironment, unittest.TestCase):
 
     @mock.patch("airflow.operators.sql.BaseHook")
     def test_with_skip_in_branch_downstream_dependencies2(self, mock_hook):
-        """ Test skipping downstream dependency for false condition"""
+        """Test skipping downstream dependency for false condition"""
         branch_op = BranchSQLOperator(
             task_id="make_choice",
             conn_id="mysql_default",
diff --git a/tests/providers/jenkins/hooks/test_jenkins.py b/tests/providers/jenkins/hooks/test_jenkins.py
index 13da3c5..044ac46 100644
--- a/tests/providers/jenkins/hooks/test_jenkins.py
+++ b/tests/providers/jenkins/hooks/test_jenkins.py
@@ -26,7 +26,7 @@ class TestJenkinsHook(unittest.TestCase):
 
     @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
     def test_client_created_default_http(self, get_connection_mock):
-        """ tests `init` method to validate http client creation when all parameters are passed """
+        """tests `init` method to validate http client creation when all parameters are passed """
         default_connection_id = 'jenkins_default'
 
         connection_host = 'http://test.com'
@@ -43,7 +43,7 @@ class TestJenkinsHook(unittest.TestCase):
 
     @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
     def test_client_created_default_https(self, get_connection_mock):
-        """ tests `init` method to validate https client creation when all
+        """tests `init` method to validate https client creation when all
         parameters are passed """
         default_connection_id = 'jenkins_default'
 
diff --git a/tests/providers/slack/hooks/test_slack.py b/tests/providers/slack/hooks/test_slack.py
index 9334948..3c7bade 100644
--- a/tests/providers/slack/hooks/test_slack.py
+++ b/tests/providers/slack/hooks/test_slack.py
@@ -28,7 +28,7 @@ from airflow.providers.slack.hooks.slack import SlackHook
 class TestSlackHook(unittest.TestCase):
 
     def test_get_token_with_token_only(self):
-        """ tests `__get_token` method when only token is provided """
+        """tests `__get_token` method when only token is provided """
         # Given
         test_token = 'test_token'
         test_conn_id = None
@@ -44,7 +44,7 @@ class TestSlackHook(unittest.TestCase):
     @mock.patch('airflow.providers.slack.hooks.slack.WebClient')
     @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
     def test_get_token_with_valid_slack_conn_id_only(self, get_connection_mock, mock_slack_client):
-        """ tests `__get_token` method when only connection is provided """
+        """tests `__get_token` method when only connection is provided """
         # Given
         test_token = None
         test_conn_id = 'x'
@@ -64,7 +64,7 @@ class TestSlackHook(unittest.TestCase):
 
     @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
     def test_get_token_with_no_password_slack_conn_id_only(self, get_connection_mock):
-        """ tests `__get_token` method when only connection is provided """
+        """tests `__get_token` method when only connection is provided """
 
         # Mock
         conn = mock.Mock()
@@ -76,7 +76,7 @@ class TestSlackHook(unittest.TestCase):
 
     @mock.patch('airflow.providers.slack.hooks.slack.SlackHook.get_connection')
     def test_get_token_with_empty_password_slack_conn_id_only(self, get_connection_mock):
-        """ tests `__get_token` method when only connection is provided """
+        """tests `__get_token` method when only connection is provided """
 
         # Mock
         get_connection_mock.return_value = mock.Mock(password=None)
@@ -85,7 +85,7 @@ class TestSlackHook(unittest.TestCase):
         self.assertRaises(AirflowException, SlackHook, token=None, slack_conn_id='x')
 
     def test_get_token_with_token_and_slack_conn_id(self):
-        """ tests `__get_token` method when both arguments are provided """
+        """tests `__get_token` method when both arguments are provided """
         # Given
         test_token = 'test_token'
         test_conn_id = 'x'
@@ -99,7 +99,7 @@ class TestSlackHook(unittest.TestCase):
         self.assertEqual(output, expected)
 
     def test_get_token_with_out_token_nor_slack_conn_id(self):
-        """ tests `__get_token` method when no arguments are provided """
+        """tests `__get_token` method when no arguments are provided """
 
         self.assertRaises(AirflowException, SlackHook, token=None, slack_conn_id=None)
 
diff --git a/tests/providers/yandex/hooks/test_yandex.py b/tests/providers/yandex/hooks/test_yandex.py
index 3c1613b..c9493b4 100644
--- a/tests/providers/yandex/hooks/test_yandex.py
+++ b/tests/providers/yandex/hooks/test_yandex.py
@@ -29,7 +29,7 @@ class TestYandexHook(unittest.TestCase):
     @mock.patch('airflow.providers.yandex.hooks.yandex.YandexCloudBaseHook._get_credentials')
     def test_client_created_without_exceptions(self, get_credentials_mock,
                                                get_connection_mock):
-        """ tests `init` method to validate client creation when all parameters are passed """
+        """tests `init` method to validate client creation when all parameters are passed """
 
         # Inputs to constructor
         default_folder_id = 'test_id'
@@ -49,7 +49,7 @@ class TestYandexHook(unittest.TestCase):
     @mock.patch('airflow.hooks.base_hook.BaseHook.get_connection')
     def test_get_credentials_raise_exception(self, get_connection_mock):
 
-        """ tests 'get_credentials' method raising exception if none of the required fields are passed."""
+        """tests 'get_credentials' method raising exception if none of the required fields are passed."""
 
         # Inputs to constructor
         default_folder_id = 'test_id'
diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py
index 152a3c2..948ca99 100644
--- a/tests/serialization/test_dag_serialization.py
+++ b/tests/serialization/test_dag_serialization.py
@@ -135,7 +135,7 @@ def make_simple_dag():
 
 
 def make_user_defined_macro_filter_dag():
-    """ Make DAGs with user defined macros and filters using locally defined methods.
+    """Make DAGs with user defined macros and filters using locally defined methods.
 
     For Webserver, we do not include ``user_defined_macros`` & ``user_defined_filters``.