Posted to commits@buildstream.apache.org by ro...@apache.org on 2020/12/29 13:30:16 UTC

[buildstream] 05/08: Reformat code using Black

This is an automated email from the ASF dual-hosted git repository.

root pushed a commit to branch frazer/flake8
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 21dfc03f6b118f2470d34386703c05b1a06cc1b1
Author: Chandan Singh <cs...@bloomberg.net>
AuthorDate: Mon Nov 11 17:07:09 2019 +0000

    Reformat code using Black
    
    As discussed on the mailing list, reformat the code using Black. This is
    a one-off change to reformat our entire codebase. Moving forward, we
    shouldn't expect such blanket reformats; rather, each change is expected
    to already comply with the Black formatting style.
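    
    For illustration, the two dominant kinds of change in this reformat are
    quote normalisation and call wrapping. The before/after pair below is
    lifted from the diff that follows (see _cas/cascache.py and
    _artifactcache.py); behaviour is unchanged, only the formatting differs:
    
        # Before: single quotes, continuation lines wrapped by hand
        os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
        raise ArtifactError("Failed to pull artifact {}".format(display_key),
                            detail="\n".join(str(e) for e in errors))
    
        # After: double quotes, and the long call exploded so its arguments
        # sit on their own indented line inside the parentheses
        os.makedirs(os.path.join(self.casdir, "refs", "heads"), exist_ok=True)
        raise ArtifactError(
            "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors)
        )
    
    A one-off reformat like this can be reproduced, or verified in CI, with
    Black's command line. Black must be installed, and the line length used
    below is an assumption (the wrapped lines in this diff suggest a limit
    well above Black's default of 88), not something recorded in the commit
    itself:
    
        black --line-length 119 src tests           # rewrite files in place
        black --check --line-length 119 src tests   # report only, do not modify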
---
 src/buildstream/__init__.py                        |    4 +-
 src/buildstream/__main__.py                        |    3 +-
 src/buildstream/_artifact.py                       |   41 +-
 src/buildstream/_artifactcache.py                  |   63 +-
 src/buildstream/_artifactelement.py                |    6 +-
 src/buildstream/_basecache.py                      |   36 +-
 src/buildstream/_cachekey.py                       |    2 +-
 src/buildstream/_cas/cascache.py                   |  159 ++--
 src/buildstream/_cas/casremote.py                  |   30 +-
 src/buildstream/_cas/casserver.py                  |  140 +--
 src/buildstream/_context.py                        |  176 ++--
 src/buildstream/_elementfactory.py                 |   15 +-
 src/buildstream/_exceptions.py                     |    9 +-
 src/buildstream/_frontend/app.py                   |  456 +++++----
 src/buildstream/_frontend/cli.py                   |  943 ++++++++++--------
 src/buildstream/_frontend/complete.py              |   71 +-
 src/buildstream/_frontend/linuxapp.py              |    7 +-
 src/buildstream/_frontend/profile.py               |    3 +-
 src/buildstream/_frontend/status.py                |  160 ++--
 src/buildstream/_frontend/widget.py                |  344 ++++---
 src/buildstream/_gitsourcebase.py                  |  425 +++++----
 src/buildstream/_includes.py                       |   60 +-
 src/buildstream/_loader/loader.py                  |  198 ++--
 src/buildstream/_loader/metaelement.py             |   20 +-
 src/buildstream/_loader/metasource.py              |    2 +-
 src/buildstream/_message.py                        |   76 +-
 src/buildstream/_messenger.py                      |   47 +-
 src/buildstream/_options/option.py                 |   14 +-
 src/buildstream/_options/optionarch.py             |   10 +-
 src/buildstream/_options/optionbool.py             |   15 +-
 src/buildstream/_options/optioneltmask.py          |    4 +-
 src/buildstream/_options/optionenum.py             |   28 +-
 src/buildstream/_options/optionflags.py            |   32 +-
 src/buildstream/_options/optionos.py               |    3 +-
 src/buildstream/_options/optionpool.py             |   50 +-
 src/buildstream/_pipeline.py                       |   70 +-
 src/buildstream/_platform/darwin.py                |    6 +-
 src/buildstream/_platform/fallback.py              |   10 +-
 src/buildstream/_platform/linux.py                 |   38 +-
 src/buildstream/_platform/platform.py              |   69 +-
 src/buildstream/_platform/win32.py                 |    3 +-
 src/buildstream/_plugincontext.py                  |  140 +--
 src/buildstream/_profile.py                        |   45 +-
 src/buildstream/_project.py                        |  368 +++----
 src/buildstream/_projectrefs.py                    |   17 +-
 src/buildstream/_remote.py                         |   66 +-
 src/buildstream/_scheduler/jobs/elementjob.py      |   12 +-
 src/buildstream/_scheduler/jobs/job.py             |  145 ++-
 src/buildstream/_scheduler/jobs/jobpickler.py      |   14 +-
 src/buildstream/_scheduler/queues/buildqueue.py    |   13 +-
 src/buildstream/_scheduler/queues/queue.py         |   54 +-
 src/buildstream/_scheduler/resources.py            |   13 +-
 src/buildstream/_scheduler/scheduler.py            |   83 +-
 src/buildstream/_signals.py                        |   20 +-
 src/buildstream/_site.py                           |   14 +-
 src/buildstream/_sourcecache.py                    |   40 +-
 src/buildstream/_sourcefactory.py                  |   11 +-
 src/buildstream/_state.py                          |   13 +-
 src/buildstream/_stream.py                         |  538 ++++++-----
 src/buildstream/_version.py                        |  133 +--
 src/buildstream/_workspaces.py                     |   99 +-
 src/buildstream/buildelement.py                    |   59 +-
 src/buildstream/element.py                         |  601 ++++++------
 src/buildstream/plugin.py                          |  118 ++-
 src/buildstream/plugins/elements/autotools.py      |    3 +-
 src/buildstream/plugins/elements/compose.py        |   37 +-
 src/buildstream/plugins/elements/filter.py         |   59 +-
 src/buildstream/plugins/elements/import.py         |   28 +-
 src/buildstream/plugins/elements/junction.py       |   10 +-
 src/buildstream/plugins/elements/manual.py         |    3 +-
 src/buildstream/plugins/elements/pip.py            |    3 +-
 src/buildstream/plugins/elements/script.py         |   12 +-
 src/buildstream/plugins/elements/stack.py          |    4 +-
 .../plugins/sources/_downloadablefilesource.py     |   61 +-
 src/buildstream/plugins/sources/bzr.py             |  109 ++-
 src/buildstream/plugins/sources/deb.py             |    6 +-
 src/buildstream/plugins/sources/local.py           |    8 +-
 src/buildstream/plugins/sources/patch.py           |   12 +-
 src/buildstream/plugins/sources/pip.py             |  106 ++-
 src/buildstream/plugins/sources/remote.py          |   11 +-
 src/buildstream/plugins/sources/tar.py             |   45 +-
 src/buildstream/plugins/sources/workspace.py       |   12 +-
 src/buildstream/plugins/sources/zip.py             |   14 +-
 src/buildstream/sandbox/_config.py                 |   11 +-
 src/buildstream/sandbox/_mount.py                  |   19 +-
 src/buildstream/sandbox/_mounter.py                |   48 +-
 src/buildstream/sandbox/_sandboxbuildbox.py        |   73 +-
 src/buildstream/sandbox/_sandboxbwrap.py           |  118 ++-
 src/buildstream/sandbox/_sandboxchroot.py          |   60 +-
 src/buildstream/sandbox/_sandboxdummy.py           |   11 +-
 src/buildstream/sandbox/_sandboxreapi.py           |   47 +-
 src/buildstream/sandbox/_sandboxremote.py          |  215 +++--
 src/buildstream/sandbox/sandbox.py                 |  120 ++-
 src/buildstream/scriptelement.py                   |   72 +-
 src/buildstream/source.py                          |  169 ++--
 src/buildstream/storage/_casbaseddirectory.py      |  111 +--
 src/buildstream/storage/_filebaseddirectory.py     |   64 +-
 src/buildstream/storage/directory.py               |   18 +-
 src/buildstream/testing/__init__.py                |    9 +-
 src/buildstream/testing/_fixtures.py               |    1 +
 .../testing/_sourcetests/build_checkout.py         |   36 +-
 src/buildstream/testing/_sourcetests/fetch.py      |   57 +-
 src/buildstream/testing/_sourcetests/mirror.py     |  318 +++----
 .../testing/_sourcetests/source_determinism.py     |   75 +-
 src/buildstream/testing/_sourcetests/track.py      |  245 ++---
 .../testing/_sourcetests/track_cross_junction.py   |  134 ++-
 src/buildstream/testing/_sourcetests/utils.py      |   15 +-
 src/buildstream/testing/_sourcetests/workspace.py  |   85 +-
 src/buildstream/testing/_utils/junction.py         |   41 +-
 src/buildstream/testing/_utils/site.py             |   43 +-
 src/buildstream/testing/integration.py             |   22 +-
 src/buildstream/testing/repo.py                    |    7 +-
 src/buildstream/testing/runcli.py                  |  282 +++---
 src/buildstream/types.py                           |    2 +-
 src/buildstream/utils.py                           |  265 +++---
 tests/artifactcache/artifactservice.py             |    6 +-
 tests/artifactcache/capabilities.py                |   25 +-
 tests/artifactcache/config.py                      |  186 ++--
 tests/artifactcache/expiry.py                      |  259 +++--
 tests/artifactcache/junctions.py                   |  153 ++-
 tests/artifactcache/pull.py                        |   84 +-
 tests/artifactcache/push.py                        |  105 +-
 tests/cachekey/cachekey.py                         |  121 +--
 tests/cachekey/update.py                           |   29 +-
 tests/conftest.py                                  |   76 +-
 tests/elements/filter.py                           |  285 +++---
 .../filter/basic/element_plugins/dynamic.py        |    6 +-
 tests/examples/autotools.py                        |   50 +-
 tests/examples/developing.py                       |   61 +-
 tests/examples/first-project.py                    |   14 +-
 tests/examples/flatpak-autotools.py                |   43 +-
 tests/examples/integration-commands.py             |   24 +-
 tests/examples/junctions.py                        |   36 +-
 tests/examples/running-commands.py                 |   26 +-
 tests/external_plugins.py                          |   14 +-
 tests/format/assertion.py                          |   49 +-
 tests/format/dependencies.py                       |  164 ++--
 tests/format/include.py                            |  265 ++----
 tests/format/include_composition.py                |  101 +-
 tests/format/invalid_keys.py                       |   20 +-
 tests/format/junctions.py                          |  301 +++---
 tests/format/listdirectiveerrors.py                |   32 +-
 tests/format/optionarch.py                         |   88 +-
 tests/format/optionbool.py                         |  121 +--
 tests/format/optioneltmask.py                      |   82 +-
 tests/format/optionenum.py                         |  128 ++-
 tests/format/optionexports.py                      |   41 +-
 tests/format/optionflags.py                        |  148 ++-
 tests/format/optionos.py                           |   53 +-
 tests/format/optionoverrides.py                    |   17 +-
 tests/format/options.py                            |  281 +++---
 tests/format/project.py                            |  161 ++--
 .../plugin-no-load-ref/plugins/noloadref.py        |    1 -
 .../errorplugin/preflighterror.py                  |    4 +-
 tests/format/projectoverrides.py                   |   17 +-
 tests/format/userconfig.py                         |    6 +-
 tests/format/variables.py                          |  189 ++--
 tests/frontend/__init__.py                         |    6 +-
 tests/frontend/artifact_delete.py                  |  140 ++-
 tests/frontend/artifact_list_contents.py           |   79 +-
 tests/frontend/artifact_log.py                     |   33 +-
 tests/frontend/artifact_show.py                    |   80 +-
 tests/frontend/buildcheckout.py                    |  671 ++++++-------
 tests/frontend/completions.py                      |  374 ++++----
 tests/frontend/compose_splits.py                   |   22 +-
 tests/frontend/configurable_warnings.py            |   45 +-
 .../frontend/configuredwarning/plugins/corewarn.py |    3 +-
 .../consistencyerror/plugins/consistencybug.py     |    1 -
 .../consistencyerror/plugins/consistencyerror.py   |    4 +-
 tests/frontend/cross_junction_workspace.py         |   60 +-
 tests/frontend/fetch.py                            |  116 +--
 tests/frontend/help.py                             |   23 +-
 tests/frontend/init.py                             |  108 +--
 tests/frontend/large_directory.py                  |   25 +-
 tests/frontend/logging.py                          |   81 +-
 tests/frontend/main.py                             |    8 +-
 tests/frontend/mirror.py                           |  414 ++++----
 tests/frontend/order.py                            |   79 +-
 tests/frontend/overlaps.py                         |   43 +-
 tests/frontend/progress.py                         |   82 +-
 tests/frontend/project/sources/fetch_source.py     |   17 +-
 tests/frontend/pull.py                             |  340 +++----
 tests/frontend/push.py                             |  483 +++++-----
 tests/frontend/rebuild.py                          |   13 +-
 tests/frontend/remote-caches.py                    |   46 +-
 tests/frontend/show.py                             |  501 +++++-----
 tests/frontend/source_checkout.py                  |  189 ++--
 tests/frontend/track.py                            |  227 ++---
 tests/frontend/version.py                          |   10 +-
 tests/frontend/workspace.py                        | 1002 ++++++++------------
 tests/integration/artifact.py                      |   85 +-
 tests/integration/autotools.py                     |   73 +-
 tests/integration/build-uid.py                     |   49 +-
 tests/integration/cachedfail.py                    |  194 ++--
 tests/integration/cmake.py                         |   44 +-
 tests/integration/compose-symlinks.py              |   15 +-
 tests/integration/compose.py                       |  154 +--
 tests/integration/filter.py                        |   19 +-
 tests/integration/import.py                        |   50 +-
 tests/integration/make.py                          |   28 +-
 tests/integration/manual.py                        |  164 ++--
 tests/integration/messages.py                      |   50 +-
 tests/integration/pip_element.py                   |  104 +-
 tests/integration/pip_source.py                    |  186 ++--
 tests/integration/project/files/pip-source/app1.py |    4 +-
 tests/integration/pullbuildtrees.py                |  122 ++-
 tests/integration/sandbox-bwrap.py                 |   39 +-
 tests/integration/script.py                        |  222 +++--
 tests/integration/shell.py                         |  348 ++++---
 tests/integration/shellbuildtrees.py               |  309 +++---
 tests/integration/sockets.py                       |   17 +-
 tests/integration/source-determinism.py            |   69 +-
 tests/integration/stack.py                         |   19 +-
 tests/integration/symlinks.py                      |   53 +-
 tests/integration/workspace.py                     |  262 +++--
 tests/internals/context.py                         |   78 +-
 tests/internals/loader.py                          |   35 +-
 tests/internals/pluginfactory.py                   |  315 +++---
 tests/internals/pluginfactory/wrongtype/foo.py     |    2 +-
 tests/internals/pluginloading.py                   |   27 +-
 .../customelement/pluginelements/foo.py            |    1 -
 .../customsource/pluginsources/foo.py              |    1 -
 tests/internals/storage.py                         |   11 +-
 tests/internals/storage_vdir_import.py             |  127 ++-
 tests/internals/utils_save_atomic.py               |   48 +-
 tests/internals/yaml.py                            |  356 +++----
 .../deprecationwarnings/deprecationwarnings.py     |   19 +-
 tests/remoteexecution/buildfail.py                 |   35 +-
 tests/remoteexecution/buildtree.py                 |   39 +-
 tests/remoteexecution/junction.py                  |   66 +-
 tests/remoteexecution/partial.py                   |   59 +-
 tests/remoteexecution/simple.py                    |   43 +-
 tests/sandboxes/fallback.py                        |   40 +-
 tests/sandboxes/missing-command.py                 |   11 +-
 tests/sandboxes/missing_dependencies.py            |   73 +-
 tests/sandboxes/mounting/mount_simple.py           |   18 +-
 tests/sandboxes/remote-exec-config.py              |   68 +-
 tests/sandboxes/selection.py                       |   57 +-
 tests/sourcecache/cache.py                         |   53 +-
 tests/sourcecache/capabilities.py                  |   24 +-
 tests/sourcecache/config.py                        |   20 +-
 tests/sourcecache/fetch.py                         |   94 +-
 .../project/plugins/elements/always_fail.py        |    1 -
 tests/sourcecache/push.py                          |  175 ++--
 tests/sourcecache/source-checkout.py               |   28 +-
 tests/sourcecache/staging.py                       |   58 +-
 tests/sourcecache/workspace.py                     |   61 +-
 tests/sources/bzr.py                               |   30 +-
 tests/sources/deb.py                               |   64 +-
 tests/sources/git.py                               |  990 +++++++++----------
 tests/sources/keytest.py                           |    3 +-
 tests/sources/local.py                             |  165 ++--
 .../plugins/sources/always_cached.py               |    1 -
 tests/sources/no_fetch_cached.py                   |   25 +-
 tests/sources/patch.py                             |   93 +-
 tests/sources/pip.py                               |   46 +-
 tests/sources/previous_source_access.py            |   31 +-
 .../plugins/sources/foo_transform.py               |   26 +-
 tests/sources/remote.py                            |  155 ++-
 tests/sources/tar.py                               |  236 +++--
 tests/sources/zip.py                               |  109 +--
 tests/testutils/artifactshare.py                   |   37 +-
 tests/testutils/element_generators.py              |   24 +-
 tests/testutils/file_server.py                     |    4 +-
 tests/testutils/filetypegenerator.py               |    2 +-
 tests/testutils/ftp_server.py                      |    6 +-
 tests/testutils/http_server.py                     |   53 +-
 tests/testutils/junction.py                        |    9 +-
 tests/testutils/patch.py                           |   10 +-
 tests/testutils/python_repo.py                     |   45 +-
 tests/testutils/repo/bzr.py                        |   32 +-
 tests/testutils/repo/git.py                        |   71 +-
 tests/testutils/repo/tar.py                        |   14 +-
 tests/testutils/repo/zip.py                        |   16 +-
 tests/testutils/setuptools.py                      |   13 +-
 275 files changed, 12098 insertions(+), 13931 deletions(-)

diff --git a/src/buildstream/__init__.py b/src/buildstream/__init__.py
index cd8d0f1..c78fcbb 100644
--- a/src/buildstream/__init__.py
+++ b/src/buildstream/__init__.py
@@ -19,11 +19,13 @@
 
 # Plugin author facing APIs
 import os
+
 if "_BST_COMPLETION" not in os.environ:
 
     # Special sauce to get the version from versioneer
     from ._version import get_versions
-    __version__ = get_versions()['version']
+
+    __version__ = get_versions()["version"]
     del get_versions
 
     from .utils import UtilError, ProgramNotFoundError
diff --git a/src/buildstream/__main__.py b/src/buildstream/__main__.py
index 4b0fdab..556a0f6 100644
--- a/src/buildstream/__main__.py
+++ b/src/buildstream/__main__.py
@@ -11,7 +11,8 @@
 # This is used when we need to run BuildStream before installing,
 # like when we build documentation.
 #
-if __name__ == '__main__':
+if __name__ == "__main__":
     # pylint: disable=no-value-for-parameter
     from ._frontend.cli import cli
+
     cli()
diff --git a/src/buildstream/_artifact.py b/src/buildstream/_artifact.py
index e5174ea..feba389 100644
--- a/src/buildstream/_artifact.py
+++ b/src/buildstream/_artifact.py
@@ -47,7 +47,7 @@ from .storage._casbaseddirectory import CasBasedDirectory
 #     strong_key (str): The element's strong cache key, dependent on context
 #     weak_key (str): The element's weak cache key
 #
-class Artifact():
+class Artifact:
 
     version = 0
 
@@ -61,11 +61,11 @@ class Artifact():
         self._tmpdir = context.tmpdir
         self._proto = None
 
-        self._metadata_keys = None                    # Strong and weak key tuple extracted from the artifact
-        self._metadata_dependencies = None             # Dictionary of dependency strong keys from the artifact
-        self._metadata_workspaced = None              # Boolean of whether it's a workspaced artifact
+        self._metadata_keys = None  # Strong and weak key tuple extracted from the artifact
+        self._metadata_dependencies = None  # Dictionary of dependency strong keys from the artifact
+        self._metadata_workspaced = None  # Boolean of whether it's a workspaced artifact
         self._metadata_workspaced_dependencies = None  # List of which dependencies are workspaced from the artifact
-        self._cached = None                          # Boolean of whether the artifact is cached
+        self._cached = None  # Boolean of whether the artifact is cached
 
     # get_files():
     #
@@ -193,12 +193,11 @@ class Artifact():
             artifact.buildtree.CopyFrom(buildtreevdir._get_digest())
             size += buildtreevdir.get_size()
 
-        os.makedirs(os.path.dirname(os.path.join(
-            self._artifactdir, element.get_artifact_name())), exist_ok=True)
+        os.makedirs(os.path.dirname(os.path.join(self._artifactdir, element.get_artifact_name())), exist_ok=True)
         keys = utils._deduplicate([self._cache_key, self._weak_cache_key])
         for key in keys:
             path = os.path.join(self._artifactdir, element.get_artifact_name(key=key))
-            with utils.save_file_atomic(path, mode='wb') as f:
+            with utils.save_file_atomic(path, mode="wb") as f:
                 f.write(artifact.SerializeToString())
 
         return size
@@ -247,7 +246,7 @@ class Artifact():
         # Load the public data from the artifact
         artifact = self._get_proto()
         meta_file = self._cas.objpath(artifact.public_data)
-        data = _yaml.load(meta_file, shortname='public.yaml')
+        data = _yaml.load(meta_file, shortname="public.yaml")
 
         return data
 
@@ -263,9 +262,7 @@ class Artifact():
     def load_build_result(self):
 
         artifact = self._get_proto()
-        build_result = (artifact.build_success,
-                        artifact.build_error,
-                        artifact.build_error_details)
+        build_result = (artifact.build_success, artifact.build_error, artifact.build_error_details)
 
         return build_result
 
@@ -345,8 +342,9 @@ class Artifact():
         # Extract proto
         artifact = self._get_proto()
 
-        self._metadata_workspaced_dependencies = [dep.element_name for dep in artifact.build_deps
-                                                  if dep.was_workspaced]
+        self._metadata_workspaced_dependencies = [
+            dep.element_name for dep in artifact.build_deps if dep.was_workspaced
+        ]
 
         return self._metadata_workspaced_dependencies
 
@@ -419,12 +417,14 @@ class Artifact():
         # Determine whether directories are required
         require_directories = context.require_artifact_directories
         # Determine whether file contents are required as well
-        require_files = (context.require_artifact_files or
-                         self._element._artifact_files_required())
+        require_files = context.require_artifact_files or self._element._artifact_files_required()
 
         # Check whether 'files' subdirectory is available, with or without file contents
-        if (require_directories and str(artifact.files) and
-                not self._cas.contains_directory(artifact.files, with_files=require_files)):
+        if (
+            require_directories
+            and str(artifact.files)
+            and not self._cas.contains_directory(artifact.files, with_files=require_files)
+        ):
             self._cached = False
             return False
 
@@ -471,11 +471,10 @@ class Artifact():
 
         key = self.get_extract_key()
 
-        proto_path = os.path.join(self._artifactdir,
-                                  self._element.get_artifact_name(key=key))
+        proto_path = os.path.join(self._artifactdir, self._element.get_artifact_name(key=key))
         artifact = ArtifactProto()
         try:
-            with open(proto_path, mode='r+b') as f:
+            with open(proto_path, mode="r+b") as f:
                 artifact.ParseFromString(f.read())
         except FileNotFoundError:
             return None
diff --git a/src/buildstream/_artifactcache.py b/src/buildstream/_artifactcache.py
index 79d0dc5..03c47b9 100644
--- a/src/buildstream/_artifactcache.py
+++ b/src/buildstream/_artifactcache.py
@@ -23,8 +23,7 @@ import grpc
 
 from ._basecache import BaseCache
 from ._exceptions import ArtifactError, CASError, CASCacheError, CASRemoteError, RemoteError
-from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
-    artifact_pb2, artifact_pb2_grpc
+from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, artifact_pb2, artifact_pb2_grpc
 
 from ._remote import BaseRemote
 from .storage._casbaseddirectory import CasBasedDirectory
@@ -38,7 +37,6 @@ from . import utils
 # artifact remotes.
 #
 class ArtifactRemote(BaseRemote):
-
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.artifact_service = None
@@ -78,8 +76,10 @@ class ArtifactRemote(BaseRemote):
         except grpc.RpcError as e:
             # Check if this remote has the artifact service
             if e.code() == grpc.StatusCode.UNIMPLEMENTED:
-                raise RemoteError("Configured remote does not have the BuildStream "
-                                  "capabilities service. Please check remote configuration.")
+                raise RemoteError(
+                    "Configured remote does not have the BuildStream "
+                    "capabilities service. Please check remote configuration."
+                )
             # Else raise exception with details
             raise RemoteError("Remote initialisation failed: {}".format(e.details()))
 
@@ -263,9 +263,11 @@ class ArtifactCache(BaseCache):
             if self._push_artifact_blobs(artifact, remote):
                 element.info("Pushed data from artifact {} -> {}".format(display_key, remote))
             else:
-                element.info("Remote ({}) already has all data of artifact {} cached".format(
-                    remote, element._get_brief_display_key()
-                ))
+                element.info(
+                    "Remote ({}) already has all data of artifact {} cached".format(
+                        remote, element._get_brief_display_key()
+                    )
+                )
 
         for remote in index_remotes:
             remote.init()
@@ -275,9 +277,9 @@ class ArtifactCache(BaseCache):
                 element.info("Pushed artifact {} -> {}".format(display_key, remote))
                 pushed = True
             else:
-                element.info("Remote ({}) already has artifact {} cached".format(
-                    remote, element._get_brief_display_key()
-                ))
+                element.info(
+                    "Remote ({}) already has artifact {} cached".format(remote, element._get_brief_display_key())
+                )
 
         return pushed
 
@@ -295,7 +297,7 @@ class ArtifactCache(BaseCache):
     #
     def pull(self, element, key, *, pull_buildtrees=False):
         artifact = None
-        display_key = key[:self.context.log_key_length]
+        display_key = key[: self.context.log_key_length]
         project = element._get_project()
 
         errors = []
@@ -310,16 +312,15 @@ class ArtifactCache(BaseCache):
                     element.info("Pulled artifact {} <- {}".format(display_key, remote))
                     break
                 else:
-                    element.info("Remote ({}) does not have artifact {} cached".format(
-                        remote, display_key
-                    ))
+                    element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
             except CASError as e:
                 element.warn("Could not pull from remote {}: {}".format(remote, e))
                 errors.append(e)
 
         if errors and not artifact:
-            raise ArtifactError("Failed to pull artifact {}".format(display_key),
-                                detail="\n".join(str(e) for e in errors))
+            raise ArtifactError(
+                "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors)
+            )
 
         # If we don't have an artifact, we can't exactly pull our
         # artifact
@@ -337,16 +338,15 @@ class ArtifactCache(BaseCache):
                     element.info("Pulled data for artifact {} <- {}".format(display_key, remote))
                     return True
 
-                element.info("Remote ({}) does not have artifact {} cached".format(
-                    remote, display_key
-                ))
+                element.info("Remote ({}) does not have artifact {} cached".format(remote, display_key))
             except CASError as e:
                 element.warn("Could not pull from remote {}: {}".format(remote, e))
                 errors.append(e)
 
         if errors:
-            raise ArtifactError("Failed to pull artifact {}".format(display_key),
-                                detail="\n".join(str(e) for e in errors))
+            raise ArtifactError(
+                "Failed to pull artifact {}".format(display_key), detail="\n".join(str(e) for e in errors)
+            )
 
         return False
 
@@ -388,8 +388,9 @@ class ArtifactCache(BaseCache):
             push_remotes = []
 
         if not push_remotes:
-            raise ArtifactError("push_message was called, but no remote artifact " +
-                                "servers are configured as push remotes.")
+            raise ArtifactError(
+                "push_message was called, but no remote artifact " + "servers are configured as push remotes."
+            )
 
         for remote in push_remotes:
             message_digest = remote.push_message(message)
@@ -410,8 +411,7 @@ class ArtifactCache(BaseCache):
         newref = element.get_artifact_name(newkey)
 
         if not os.path.exists(os.path.join(self.artifactdir, newref)):
-            os.link(os.path.join(self.artifactdir, oldref),
-                    os.path.join(self.artifactdir, newref))
+            os.link(os.path.join(self.artifactdir, oldref), os.path.join(self.artifactdir, newref))
 
     # get_artifact_logs():
     #
@@ -425,7 +425,7 @@ class ArtifactCache(BaseCache):
     #
     def get_artifact_logs(self, ref):
         cache_id = self.cas.resolve_ref(ref, update_mtime=True)
-        vdir = CasBasedDirectory(self.cas, digest=cache_id).descend('logs')
+        vdir = CasBasedDirectory(self.cas, digest=cache_id).descend("logs")
         return vdir
 
     # fetch_missing_blobs():
@@ -517,7 +517,7 @@ class ArtifactCache(BaseCache):
         for root, _, files in os.walk(self.artifactdir):
             for artifact_file in files:
                 artifact = artifact_pb2.Artifact()
-                with open(os.path.join(root, artifact_file), 'r+b') as f:
+                with open(os.path.join(root, artifact_file), "r+b") as f:
                     artifact.ParseFromString(f.read())
 
                 if str(artifact.files):
@@ -535,7 +535,7 @@ class ArtifactCache(BaseCache):
         for root, _, files in os.walk(self.artifactdir):
             for artifact_file in files:
                 artifact = artifact_pb2.Artifact()
-                with open(os.path.join(root, artifact_file), 'r+b') as f:
+                with open(os.path.join(root, artifact_file), "r+b") as f:
                     artifact.ParseFromString(f.read())
 
                 if str(artifact.public_data):
@@ -620,8 +620,7 @@ class ArtifactCache(BaseCache):
                 remote.get_artifact(element.get_artifact_name(key=key))
             except grpc.RpcError as e:
                 if e.code() != grpc.StatusCode.NOT_FOUND:
-                    raise ArtifactError("Error checking artifact cache: {}"
-                                        .format(e.details()))
+                    raise ArtifactError("Error checking artifact cache: {}".format(e.details()))
             else:
                 return False
 
@@ -710,7 +709,7 @@ class ArtifactCache(BaseCache):
         # Write the artifact proto to cache
         artifact_path = os.path.join(self.artifactdir, artifact_name)
         os.makedirs(os.path.dirname(artifact_path), exist_ok=True)
-        with utils.save_file_atomic(artifact_path, mode='wb') as f:
+        with utils.save_file_atomic(artifact_path, mode="wb") as f:
             f.write(artifact.SerializeToString())
 
         return artifact
diff --git a/src/buildstream/_artifactelement.py b/src/buildstream/_artifactelement.py
index 48c3d17..1c1c5db 100644
--- a/src/buildstream/_artifactelement.py
+++ b/src/buildstream/_artifactelement.py
@@ -40,7 +40,7 @@ if TYPE_CHECKING:
 class ArtifactElement(Element):
 
     # A hash of ArtifactElement by ref
-    __instantiated_artifacts = {}   # type: Dict[str, ArtifactElement]
+    __instantiated_artifacts = {}  # type: Dict[str, ArtifactElement]
 
     # ArtifactElement's require this as the sandbox will use a normal
     # directory when we checkout
@@ -138,7 +138,7 @@ class ArtifactElement(Element):
     #    sandbox (Sandbox)
     #
     def configure_sandbox(self, sandbox):
-        install_root = self.get_variable('install-root')
+        install_root = self.get_variable("install-root")
 
         # Tell the sandbox to mount the build root and install root
         sandbox.mark_directory(install_root)
@@ -173,7 +173,7 @@ class ArtifactElement(Element):
 #
 def verify_artifact_ref(ref):
     try:
-        project, element, key = ref.split('/', 2)  # This will raise a ValueError if unable to split
+        project, element, key = ref.split("/", 2)  # This will raise a ValueError if unable to split
         # Explicitly raise a ValueError if the key length is not as expected
         if not _cachekey.is_key(key):
             raise ValueError
diff --git a/src/buildstream/_basecache.py b/src/buildstream/_basecache.py
index fc2e924..516119c 100644
--- a/src/buildstream/_basecache.py
+++ b/src/buildstream/_basecache.py
@@ -37,21 +37,21 @@ if TYPE_CHECKING:
 
 # Base Cache for Caches to derive from
 #
-class BaseCache():
+class BaseCache:
 
     # None of these should ever be called in the base class, but this appeases
     # pylint to some degree
-    spec_name = None                  # type: str
-    spec_error = None                 # type: Type[BstError]
-    config_node_name = None           # type: str
-    index_remote_class = None         # type: Type[BaseRemote]
+    spec_name = None  # type: str
+    spec_error = None  # type: Type[BstError]
+    config_node_name = None  # type: str
+    index_remote_class = None  # type: Type[BaseRemote]
     storage_remote_class = CASRemote  # type: Type[BaseRemote]
 
     def __init__(self, context):
         self.context = context
         self.cas = context.get_cascache()
 
-        self._remotes_setup = False           # Check to prevent double-setup of remotes
+        self._remotes_setup = False  # Check to prevent double-setup of remotes
         # Per-project list of Remote instances.
         self._storage_remotes = {}
         self._index_remotes = {}
@@ -116,8 +116,12 @@ class BaseCache():
                 artifacts = config_node.get_sequence(cls.config_node_name, default=[])
             except LoadError:
                 provenance = config_node.get_node(cls.config_node_name).get_provenance()
-                raise _yaml.LoadError("{}: '{}' must be a single remote mapping, or a list of mappings"
-                                      .format(provenance, cls.config_node_name), _yaml.LoadErrorReason.INVALID_DATA)
+                raise _yaml.LoadError(
+                    "{}: '{}' must be a single remote mapping, or a list of mappings".format(
+                        provenance, cls.config_node_name
+                    ),
+                    _yaml.LoadErrorReason.INVALID_DATA,
+                )
 
         for spec_node in artifacts:
             cache_specs.append(RemoteSpec.new_from_config_node(spec_node))
@@ -144,8 +148,7 @@ class BaseCache():
         project_specs = getattr(project, cls.spec_name)
         context_specs = getattr(context, cls.spec_name)
 
-        return list(utils._deduplicate(
-            project_extra_specs + project_specs + context_specs))
+        return list(utils._deduplicate(project_extra_specs + project_specs + context_specs))
 
     # setup_remotes():
     #
@@ -266,8 +269,9 @@ class BaseCache():
             # Check whether the specified element's project has push remotes
             index_remotes = self._index_remotes[plugin._get_project()]
             storage_remotes = self._storage_remotes[plugin._get_project()]
-            return (any(remote.spec.push for remote in index_remotes) and
-                    any(remote.spec.push for remote in storage_remotes))
+            return any(remote.spec.push for remote in index_remotes) and any(
+                remote.spec.push for remote in storage_remotes
+            )
 
     ################################################
     #               Local Private Methods          #
@@ -323,8 +327,9 @@ class BaseCache():
                 storage_remotes[remote_spec] = storage
 
         self._has_fetch_remotes = storage_remotes and index_remotes
-        self._has_push_remotes = (any(spec.push for spec in storage_remotes) and
-                                  any(spec.push for spec in index_remotes))
+        self._has_push_remotes = any(spec.push for spec in storage_remotes) and any(
+            spec.push for spec in index_remotes
+        )
 
         return index_remotes, storage_remotes
 
@@ -366,8 +371,7 @@ class BaseCache():
     #
     def _message(self, message_type, message, **kwargs):
         args = dict(kwargs)
-        self.context.messenger.message(
-            Message(message_type, message, **args))
+        self.context.messenger.message(Message(message_type, message, **args))
 
     # _set_remotes():
     #
diff --git a/src/buildstream/_cachekey.py b/src/buildstream/_cachekey.py
index 89d4767..dd92075 100644
--- a/src/buildstream/_cachekey.py
+++ b/src/buildstream/_cachekey.py
@@ -62,5 +62,5 @@ def is_key(key):
 #    (str): An sha256 hex digest of the given value
 #
 def generate_key(value):
-    ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
+    ustring = ujson.dumps(value, sort_keys=True, escape_forward_slashes=False).encode("utf-8")
     return hashlib.sha256(ustring).hexdigest()
diff --git a/src/buildstream/_cas/cascache.py b/src/buildstream/_cas/cascache.py
index 0227304..c1f2b30 100644
--- a/src/buildstream/_cas/cascache.py
+++ b/src/buildstream/_cas/cascache.py
@@ -68,15 +68,14 @@ class CASLogLevel(FastEnum):
 #     protect_session_blobs (bool): Disable expiry for blobs used in the current session
 #     log_level (LogLevel): Log level to give to buildbox-casd for logging
 #
-class CASCache():
-
+class CASCache:
     def __init__(
-            self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
+        self, path, *, casd=True, cache_quota=None, protect_session_blobs=True, log_level=CASLogLevel.WARNING
     ):
-        self.casdir = os.path.join(path, 'cas')
-        self.tmpdir = os.path.join(path, 'tmp')
-        os.makedirs(os.path.join(self.casdir, 'refs', 'heads'), exist_ok=True)
-        os.makedirs(os.path.join(self.casdir, 'objects'), exist_ok=True)
+        self.casdir = os.path.join(path, "cas")
+        self.tmpdir = os.path.join(path, "tmp")
+        os.makedirs(os.path.join(self.casdir, "refs", "heads"), exist_ok=True)
+        os.makedirs(os.path.join(self.casdir, "objects"), exist_ok=True)
         os.makedirs(self.tmpdir, exist_ok=True)
 
         self._casd_channel = None
@@ -88,19 +87,19 @@ class CASCache():
         if casd:
             # Place socket in global/user temporary directory to avoid hitting
             # the socket path length limit.
-            self._casd_socket_tempdir = tempfile.mkdtemp(prefix='buildstream')
-            self._casd_socket_path = os.path.join(self._casd_socket_tempdir, 'casd.sock')
+            self._casd_socket_tempdir = tempfile.mkdtemp(prefix="buildstream")
+            self._casd_socket_path = os.path.join(self._casd_socket_tempdir, "casd.sock")
 
-            casd_args = [utils.get_host_tool('buildbox-casd')]
-            casd_args.append('--bind=unix:' + self._casd_socket_path)
-            casd_args.append('--log-level=' + log_level.value)
+            casd_args = [utils.get_host_tool("buildbox-casd")]
+            casd_args.append("--bind=unix:" + self._casd_socket_path)
+            casd_args.append("--log-level=" + log_level.value)
 
             if cache_quota is not None:
-                casd_args.append('--quota-high={}'.format(int(cache_quota)))
-                casd_args.append('--quota-low={}'.format(int(cache_quota / 2)))
+                casd_args.append("--quota-high={}".format(int(cache_quota)))
+                casd_args.append("--quota-low={}".format(int(cache_quota / 2)))
 
                 if protect_session_blobs:
-                    casd_args.append('--protect-session-blobs')
+                    casd_args.append("--protect-session-blobs")
 
             casd_args.append(path)
 
@@ -112,7 +111,8 @@ class CASCache():
                 # The frontend will take care of it if needed
                 with _signals.blocked([signal.SIGINT], ignore=False):
                     self._casd_process = subprocess.Popen(
-                        casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT)
+                        casd_args, cwd=path, stdout=logfile_fp, stderr=subprocess.STDOUT
+                    )
 
             self._cache_usage_monitor = _CASCacheUsageMonitor(self)
         else:
@@ -123,16 +123,16 @@ class CASCache():
 
 # Popen objects are not pickle-able; however, child processes only
 # need to know whether a casd subprocess was started or not.
-        assert '_casd_process' in state
-        state['_casd_process'] = bool(self._casd_process)
+        assert "_casd_process" in state
+        state["_casd_process"] = bool(self._casd_process)
 
         # The usage monitor is not pickle-able, but we also don't need it in
         # child processes currently. Make sure that if this changes, we get a
         # bug report, by setting _cache_usage_monitor_forbidden.
-        assert '_cache_usage_monitor' in state
-        assert '_cache_usage_monitor_forbidden' in state
-        state['_cache_usage_monitor'] = None
-        state['_cache_usage_monitor_forbidden'] = True
+        assert "_cache_usage_monitor" in state
+        assert "_cache_usage_monitor_forbidden" in state
+        state["_cache_usage_monitor"] = None
+        state["_cache_usage_monitor_forbidden"] = True
 
         return state
 
@@ -148,7 +148,7 @@ class CASCache():
 
                 time.sleep(0.01)
 
-            self._casd_channel = grpc.insecure_channel('unix:' + self._casd_socket_path)
+            self._casd_channel = grpc.insecure_channel("unix:" + self._casd_socket_path)
             self._casd_cas = remote_execution_pb2_grpc.ContentAddressableStorageStub(self._casd_channel)
             self._local_cas = local_cas_pb2_grpc.LocalContentAddressableStorageStub(self._casd_channel)
 
@@ -179,8 +179,8 @@ class CASCache():
     # Preflight check.
     #
     def preflight(self):
-        headdir = os.path.join(self.casdir, 'refs', 'heads')
-        objdir = os.path.join(self.casdir, 'objects')
+        headdir = os.path.join(self.casdir, "refs", "heads")
+        objdir = os.path.join(self.casdir, "objects")
         if not (os.path.isdir(headdir) and os.path.isdir(objdir)):
             raise CASCacheError("CAS repository check failed for '{}'".format(self.casdir))
 
@@ -285,7 +285,7 @@ class CASCache():
 
         directory = remote_execution_pb2.Directory()
 
-        with open(self.objpath(tree), 'rb') as f:
+        with open(self.objpath(tree), "rb") as f:
             directory.ParseFromString(f.read())
 
         for filenode in directory.files:
@@ -297,8 +297,16 @@ class CASCache():
                 utils.safe_copy(self.objpath(filenode.digest), fullpath)
 
             if filenode.is_executable:
-                os.chmod(fullpath, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
-                         stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+                os.chmod(
+                    fullpath,
+                    stat.S_IRUSR
+                    | stat.S_IWUSR
+                    | stat.S_IXUSR
+                    | stat.S_IRGRP
+                    | stat.S_IXGRP
+                    | stat.S_IROTH
+                    | stat.S_IXOTH,
+                )
 
         for dirnode in directory.directories:
             fullpath = os.path.join(dest, dirnode.name)
@@ -365,7 +373,7 @@ class CASCache():
     #     (str): The path of the object
     #
     def objpath(self, digest):
-        return os.path.join(self.casdir, 'objects', digest.hash[:2], digest.hash[2:])
+        return os.path.join(self.casdir, "objects", digest.hash[:2], digest.hash[2:])
 
     # add_object():
     #
@@ -450,7 +458,7 @@ class CASCache():
 
         treepath = self.objpath(tree_response.tree_digest)
         tree = remote_execution_pb2.Tree()
-        with open(treepath, 'rb') as f:
+        with open(treepath, "rb") as f:
             tree.ParseFromString(f.read())
 
         root_directory = tree.root.SerializeToString()
@@ -467,7 +475,7 @@ class CASCache():
     def set_ref(self, ref, tree):
         refpath = self._refpath(ref)
         os.makedirs(os.path.dirname(refpath), exist_ok=True)
-        with utils.save_file_atomic(refpath, 'wb', tempdir=self.tmpdir) as f:
+        with utils.save_file_atomic(refpath, "wb", tempdir=self.tmpdir) as f:
             f.write(tree.SerializeToString())
 
     # resolve_ref():
@@ -485,7 +493,7 @@ class CASCache():
         refpath = self._refpath(ref)
 
         try:
-            with open(refpath, 'rb') as f:
+            with open(refpath, "rb") as f:
                 if update_mtime:
                     os.utime(refpath)
 
@@ -521,7 +529,7 @@ class CASCache():
     def remove(self, ref, *, basedir=None):
 
         if basedir is None:
-            basedir = os.path.join(self.casdir, 'refs', 'heads')
+            basedir = os.path.join(self.casdir, "refs", "heads")
         # Remove cache ref
         self._remove_ref(ref, basedir)
 
@@ -611,7 +619,7 @@ class CASCache():
 
         directory = remote_execution_pb2.Directory()
 
-        with open(self.objpath(directory_digest), 'rb') as f:
+        with open(self.objpath(directory_digest), "rb") as f:
             directory.ParseFromString(f.read())
 
         for filenode in directory.files:
@@ -626,21 +634,19 @@ class CASCache():
         dir_b = remote_execution_pb2.Directory()
 
         if tree_a:
-            with open(self.objpath(tree_a), 'rb') as f:
+            with open(self.objpath(tree_a), "rb") as f:
                 dir_a.ParseFromString(f.read())
         if tree_b:
-            with open(self.objpath(tree_b), 'rb') as f:
+            with open(self.objpath(tree_b), "rb") as f:
                 dir_b.ParseFromString(f.read())
 
         a = 0
         b = 0
         while a < len(dir_a.files) or b < len(dir_b.files):
-            if b < len(dir_b.files) and (a >= len(dir_a.files) or
-                                         dir_a.files[a].name > dir_b.files[b].name):
+            if b < len(dir_b.files) and (a >= len(dir_a.files) or dir_a.files[a].name > dir_b.files[b].name):
                 added.append(os.path.join(path, dir_b.files[b].name))
                 b += 1
-            elif a < len(dir_a.files) and (b >= len(dir_b.files) or
-                                           dir_b.files[b].name > dir_a.files[a].name):
+            elif a < len(dir_a.files) and (b >= len(dir_b.files) or dir_b.files[b].name > dir_a.files[a].name):
                 removed.append(os.path.join(path, dir_a.files[a].name))
                 a += 1
             else:
@@ -653,24 +659,41 @@ class CASCache():
         a = 0
         b = 0
         while a < len(dir_a.directories) or b < len(dir_b.directories):
-            if b < len(dir_b.directories) and (a >= len(dir_a.directories) or
-                                               dir_a.directories[a].name > dir_b.directories[b].name):
-                self.diff_trees(None, dir_b.directories[b].digest,
-                                added=added, removed=removed, modified=modified,
-                                path=os.path.join(path, dir_b.directories[b].name))
+            if b < len(dir_b.directories) and (
+                a >= len(dir_a.directories) or dir_a.directories[a].name > dir_b.directories[b].name
+            ):
+                self.diff_trees(
+                    None,
+                    dir_b.directories[b].digest,
+                    added=added,
+                    removed=removed,
+                    modified=modified,
+                    path=os.path.join(path, dir_b.directories[b].name),
+                )
                 b += 1
-            elif a < len(dir_a.directories) and (b >= len(dir_b.directories) or
-                                                 dir_b.directories[b].name > dir_a.directories[a].name):
-                self.diff_trees(dir_a.directories[a].digest, None,
-                                added=added, removed=removed, modified=modified,
-                                path=os.path.join(path, dir_a.directories[a].name))
+            elif a < len(dir_a.directories) and (
+                b >= len(dir_b.directories) or dir_b.directories[b].name > dir_a.directories[a].name
+            ):
+                self.diff_trees(
+                    dir_a.directories[a].digest,
+                    None,
+                    added=added,
+                    removed=removed,
+                    modified=modified,
+                    path=os.path.join(path, dir_a.directories[a].name),
+                )
                 a += 1
             else:
                 # Subdirectory exists in both directories
                 if dir_a.directories[a].digest.hash != dir_b.directories[b].digest.hash:
-                    self.diff_trees(dir_a.directories[a].digest, dir_b.directories[b].digest,
-                                    added=added, removed=removed, modified=modified,
-                                    path=os.path.join(path, dir_a.directories[a].name))
+                    self.diff_trees(
+                        dir_a.directories[a].digest,
+                        dir_b.directories[b].digest,
+                        added=added,
+                        removed=removed,
+                        modified=modified,
+                        path=os.path.join(path, dir_a.directories[a].name),
+                    )
                 a += 1
                 b += 1
 
@@ -703,7 +726,7 @@ class CASCache():
         return os.path.join(log_dir, str(self._casd_start_time) + ".log")
 
     def _refpath(self, ref):
-        return os.path.join(self.casdir, 'refs', 'heads', ref)
+        return os.path.join(self.casdir, "refs", "heads", ref)
 
     # _remove_ref()
     #
@@ -763,7 +786,7 @@ class CASCache():
 
         directory = remote_execution_pb2.Directory()
 
-        with open(self.objpath(tree), 'rb') as f:
+        with open(self.objpath(tree), "rb") as f:
             directory.ParseFromString(f.read())
 
         for dirnode in directory.directories:
@@ -783,7 +806,7 @@ class CASCache():
 
             directory = remote_execution_pb2.Directory()
 
-            with open(self.objpath(tree), 'rb') as f:
+            with open(self.objpath(tree), "rb") as f:
                 directory.ParseFromString(f.read())
 
         except FileNotFoundError:
@@ -813,8 +836,7 @@ class CASCache():
     @contextlib.contextmanager
     def _temporary_object(self):
         with utils._tempnamedfile(dir=self.tmpdir) as f:
-            os.chmod(f.name,
-                     stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
+            os.chmod(f.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
             yield f
 
     # _ensure_blob():
@@ -898,12 +920,13 @@ class CASCache():
             objpath = self._ensure_blob(remote, dir_digest)
 
             directory = remote_execution_pb2.Directory()
-            with open(objpath, 'rb') as f:
+            with open(objpath, "rb") as f:
                 directory.ParseFromString(f.read())
 
             for dirnode in directory.directories:
-                batch = self._fetch_directory_node(remote, dirnode.digest, batch,
-                                                   fetch_queue, fetch_next_queue, recursive=True)
+                batch = self._fetch_directory_node(
+                    remote, dirnode.digest, batch, fetch_queue, fetch_next_queue, recursive=True
+                )
 
         # Fetch final batch
         self._fetch_directory_batch(remote, batch, fetch_queue, fetch_next_queue)
@@ -913,7 +936,7 @@ class CASCache():
 
         tree = remote_execution_pb2.Tree()
 
-        with open(objpath, 'rb') as f:
+        with open(objpath, "rb") as f:
             tree.ParseFromString(f.read())
 
         tree.children.extend([tree.root])
@@ -1062,8 +1085,7 @@ class CASCache():
 #    used_size (int): Total size used by the local cache, in bytes.
 #    quota_size (int): Disk quota for the local cache, in bytes.
 #
-class _CASCacheUsage():
-
+class _CASCacheUsage:
     def __init__(self, used_size, quota_size):
         self.used_size = used_size
         self.quota_size = quota_size
@@ -1080,10 +1102,11 @@ class _CASCacheUsage():
         elif self.quota_size is None:
             return utils._pretty_size(self.used_size, dec_places=1)
         else:
-            return "{} / {} ({}%)" \
-                .format(utils._pretty_size(self.used_size, dec_places=1),
-                        utils._pretty_size(self.quota_size, dec_places=1),
-                        self.used_percent)
+            return "{} / {} ({}%)".format(
+                utils._pretty_size(self.used_size, dec_places=1),
+                utils._pretty_size(self.quota_size, dec_places=1),
+                self.used_percent,
+            )
 
 
 # _CASCacheUsageMonitor
diff --git a/src/buildstream/_cas/casremote.py b/src/buildstream/_cas/casremote.py
index a054b28..ee6f467 100644
--- a/src/buildstream/_cas/casremote.py
+++ b/src/buildstream/_cas/casremote.py
@@ -32,7 +32,6 @@ _MAX_DIGESTS = _MAX_PAYLOAD_BYTES / 80
 
 
 class BlobNotFound(CASRemoteError):
-
     def __init__(self, blob, msg):
         self.blob = blob
         super().__init__(msg)
@@ -41,7 +40,6 @@ class BlobNotFound(CASRemoteError):
 # Represents a single remote CAS cache.
 #
 class CASRemote(BaseRemote):
-
     def __init__(self, spec, cascache, **kwargs):
         super().__init__(spec, **kwargs)
 
@@ -90,7 +88,7 @@ class CASRemote(BaseRemote):
 
 # Represents a batch of blobs queued for fetching.
 #
-class _CASBatchRead():
+class _CASBatchRead:
     def __init__(self, remote):
         self._remote = remote
         self._requests = []
@@ -123,22 +121,28 @@ class _CASBatchRead():
             for response in batch_response.responses:
                 if response.status.code == code_pb2.NOT_FOUND:
                     if missing_blobs is None:
-                        raise BlobNotFound(response.digest.hash, "Failed to download blob {}: {}".format(
-                            response.digest.hash, response.status.code))
+                        raise BlobNotFound(
+                            response.digest.hash,
+                            "Failed to download blob {}: {}".format(response.digest.hash, response.status.code),
+                        )
 
                     missing_blobs.append(response.digest)
 
                 if response.status.code != code_pb2.OK:
-                    raise CASRemoteError("Failed to download blob {}: {}".format(
-                        response.digest.hash, response.status.code))
+                    raise CASRemoteError(
+                        "Failed to download blob {}: {}".format(response.digest.hash, response.status.code)
+                    )
                 if response.digest.size_bytes != len(response.data):
-                    raise CASRemoteError("Failed to download blob {}: expected {} bytes, received {} bytes".format(
-                        response.digest.hash, response.digest.size_bytes, len(response.data)))
+                    raise CASRemoteError(
+                        "Failed to download blob {}: expected {} bytes, received {} bytes".format(
+                            response.digest.hash, response.digest.size_bytes, len(response.data)
+                        )
+                    )
 
 
 # Represents a batch of blobs queued for upload.
 #
-class _CASBatchUpdate():
+class _CASBatchUpdate:
     def __init__(self, remote):
         self._remote = remote
         self._requests = []
@@ -175,5 +179,7 @@ class _CASBatchUpdate():
                     else:
                         reason = None
 
-                    raise CASRemoteError("Failed to upload blob {}: {}".format(
-                        response.digest.hash, response.status.code), reason=reason)
+                    raise CASRemoteError(
+                        "Failed to upload blob {}: {}".format(response.digest.hash, response.status.code),
+                        reason=reason,
+                    )
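
The batch classes above only gain Black's call wrapping; the underlying logic is the per-blob status check that REAPI batch responses require, since a single NOT_FOUND blob must not be confused with a transport failure, and each payload's size is verified against its digest. A rough sketch of that pattern, using plain stand-in objects instead of the real protos:

    from dataclasses import dataclass

    OK, NOT_FOUND = 0, 5  # values from google.rpc code_pb2

    @dataclass
    class BlobResponse:  # stand-in for one entry in batch_response.responses
        hash: str
        size_bytes: int
        data: bytes
        status_code: int = OK

    def check_batch(responses, missing_blobs=None):
        for r in responses:
            if r.status_code == NOT_FOUND:
                if missing_blobs is None:
                    raise LookupError("blob {} not found".format(r.hash))
                missing_blobs.append(r.hash)  # caller collects misses instead
                continue
            if r.status_code != OK:
                raise RuntimeError("blob {} failed: {}".format(r.hash, r.status_code))
            if r.size_bytes != len(r.data):  # guard against truncated payloads
                raise RuntimeError("blob {} truncated".format(r.hash))
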
diff --git a/src/buildstream/_cas/casserver.py b/src/buildstream/_cas/casserver.py
index d424143..a2110d8 100644
--- a/src/buildstream/_cas/casserver.py
+++ b/src/buildstream/_cas/casserver.py
@@ -33,8 +33,14 @@ import click
 from .._protos.build.bazel.remote.execution.v2 import remote_execution_pb2, remote_execution_pb2_grpc
 from .._protos.google.bytestream import bytestream_pb2, bytestream_pb2_grpc
 from .._protos.google.rpc import code_pb2
-from .._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
-    artifact_pb2, artifact_pb2_grpc, source_pb2, source_pb2_grpc
+from .._protos.buildstream.v2 import (
+    buildstream_pb2,
+    buildstream_pb2_grpc,
+    artifact_pb2,
+    artifact_pb2_grpc,
+    source_pb2,
+    source_pb2_grpc,
+)
 
 from .. import utils
 from .._exceptions import CASError, CASCacheError
@@ -61,8 +67,8 @@ def create_server(repo, *, enable_push, quota, index_only):
     cas = CASCache(os.path.abspath(repo), cache_quota=quota, protect_session_blobs=False)
 
     try:
-        artifactdir = os.path.join(os.path.abspath(repo), 'artifacts', 'refs')
-        sourcedir = os.path.join(os.path.abspath(repo), 'source_protos')
+        artifactdir = os.path.join(os.path.abspath(repo), "artifacts", "refs")
+        sourcedir = os.path.join(os.path.abspath(repo), "source_protos")
 
         # Use max_workers default from Python 3.5+
         max_workers = (os.cpu_count() or 1) * 5
@@ -70,31 +76,31 @@ def create_server(repo, *, enable_push, quota, index_only):
 
         if not index_only:
             bytestream_pb2_grpc.add_ByteStreamServicer_to_server(
-                _ByteStreamServicer(cas, enable_push=enable_push), server)
+                _ByteStreamServicer(cas, enable_push=enable_push), server
+            )
 
             remote_execution_pb2_grpc.add_ContentAddressableStorageServicer_to_server(
-                _ContentAddressableStorageServicer(cas, enable_push=enable_push), server)
+                _ContentAddressableStorageServicer(cas, enable_push=enable_push), server
+            )
 
-        remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(
-            _CapabilitiesServicer(), server)
+        remote_execution_pb2_grpc.add_CapabilitiesServicer_to_server(_CapabilitiesServicer(), server)
 
         buildstream_pb2_grpc.add_ReferenceStorageServicer_to_server(
-            _ReferenceStorageServicer(cas, enable_push=enable_push), server)
+            _ReferenceStorageServicer(cas, enable_push=enable_push), server
+        )
 
         artifact_pb2_grpc.add_ArtifactServiceServicer_to_server(
-            _ArtifactServicer(cas, artifactdir, update_cas=not index_only), server)
+            _ArtifactServicer(cas, artifactdir, update_cas=not index_only), server
+        )
 
-        source_pb2_grpc.add_SourceServiceServicer_to_server(
-            _SourceServicer(sourcedir), server)
+        source_pb2_grpc.add_SourceServiceServicer_to_server(_SourceServicer(sourcedir), server)
 
         # Set up reference storage and artifact capabilities
-        artifact_capabilities = buildstream_pb2.ArtifactCapabilities(
-            allow_updates=enable_push)
-        source_capabilities = buildstream_pb2.SourceCapabilities(
-            allow_updates=enable_push)
+        artifact_capabilities = buildstream_pb2.ArtifactCapabilities(allow_updates=enable_push)
+        source_capabilities = buildstream_pb2.SourceCapabilities(allow_updates=enable_push)
         buildstream_pb2_grpc.add_CapabilitiesServicer_to_server(
-            _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities),
-            server)
+            _BuildStreamCapabilitiesServicer(artifact_capabilities, source_capabilities), server
+        )
 
         yield server
 
@@ -103,28 +109,25 @@ def create_server(repo, *, enable_push, quota, index_only):
 
 
 @click.command(short_help="CAS Artifact Server")
-@click.option('--port', '-p', type=click.INT, required=True, help="Port number")
-@click.option('--server-key', help="Private server key for TLS (PEM-encoded)")
-@click.option('--server-cert', help="Public server certificate for TLS (PEM-encoded)")
-@click.option('--client-certs', help="Public client certificates for TLS (PEM-encoded)")
-@click.option('--enable-push', is_flag=True,
-              help="Allow clients to upload blobs and update artifact cache")
-@click.option('--quota', type=click.INT, default=10e9, show_default=True,
-              help="Maximum disk usage in bytes")
-@click.option('--index-only', is_flag=True,
-              help="Only provide the BuildStream artifact and source services (\"index\"), not the CAS (\"storage\")")
-@click.argument('repo')
-def server_main(repo, port, server_key, server_cert, client_certs, enable_push,
-                quota, index_only):
+@click.option("--port", "-p", type=click.INT, required=True, help="Port number")
+@click.option("--server-key", help="Private server key for TLS (PEM-encoded)")
+@click.option("--server-cert", help="Public server certificate for TLS (PEM-encoded)")
+@click.option("--client-certs", help="Public client certificates for TLS (PEM-encoded)")
+@click.option("--enable-push", is_flag=True, help="Allow clients to upload blobs and update artifact cache")
+@click.option("--quota", type=click.INT, default=10e9, show_default=True, help="Maximum disk usage in bytes")
+@click.option(
+    "--index-only",
+    is_flag=True,
+    help='Only provide the BuildStream artifact and source services ("index"), not the CAS ("storage")',
+)
+@click.argument("repo")
+def server_main(repo, port, server_key, server_cert, client_certs, enable_push, quota, index_only):
     # Handle SIGTERM by calling sys.exit(0), which will raise a SystemExit exception,
     # properly executing cleanup code in `finally` clauses and context managers.
     # This is required to terminate buildbox-casd on SIGTERM.
     signal.signal(signal.SIGTERM, lambda signalnum, frame: sys.exit(0))
 
-    with create_server(repo,
-                       quota=quota,
-                       enable_push=enable_push,
-                       index_only=index_only) as server:
+    with create_server(repo, quota=quota, enable_push=enable_push, index_only=index_only) as server:
 
         use_tls = bool(server_key)
 
@@ -138,23 +141,25 @@ def server_main(repo, port, server_key, server_cert, client_certs, enable_push,
 
         if use_tls:
             # Read public/private key pair
-            with open(server_key, 'rb') as f:
+            with open(server_key, "rb") as f:
                 server_key_bytes = f.read()
-            with open(server_cert, 'rb') as f:
+            with open(server_cert, "rb") as f:
                 server_cert_bytes = f.read()
 
             if client_certs:
-                with open(client_certs, 'rb') as f:
+                with open(client_certs, "rb") as f:
                     client_certs_bytes = f.read()
             else:
                 client_certs_bytes = None
 
-            credentials = grpc.ssl_server_credentials([(server_key_bytes, server_cert_bytes)],
-                                                      root_certificates=client_certs_bytes,
-                                                      require_client_auth=bool(client_certs))
-            server.add_secure_port('[::]:{}'.format(port), credentials)
+            credentials = grpc.ssl_server_credentials(
+                [(server_key_bytes, server_cert_bytes)],
+                root_certificates=client_certs_bytes,
+                require_client_auth=bool(client_certs),
+            )
+            server.add_secure_port("[::]:{}".format(port), credentials)
         else:
-            server.add_insecure_port('[::]:{}'.format(port))
+            server.add_insecure_port("[::]:{}".format(port))
 
         # Run artifact server
         server.start()
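
The TLS setup above is the stock grpc-python recipe: read PEM-encoded key and certificate bytes, optionally pin trusted client certificates, and bind either a secure or an insecure port. A condensed sketch (function name and paths are placeholders):

    import grpc

    def bind_port(server, port, key_path=None, cert_path=None, client_ca_path=None):
        if not key_path:
            server.add_insecure_port("[::]:{}".format(port))  # plaintext
            return
        with open(key_path, "rb") as f:
            key = f.read()
        with open(cert_path, "rb") as f:
            cert = f.read()
        ca = None
        if client_ca_path:
            with open(client_ca_path, "rb") as f:
                ca = f.read()
        credentials = grpc.ssl_server_credentials(
            [(key, cert)],  # one private key / certificate chain pair
            root_certificates=ca,
            require_client_auth=bool(client_ca_path),  # mutual TLS only if CAs given
        )
        server.add_secure_port("[::]:{}".format(port), credentials)
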
@@ -183,7 +188,7 @@ class _ByteStreamServicer(bytestream_pb2_grpc.ByteStreamServicer):
             return
 
         try:
-            with open(self.cas.objpath(client_digest), 'rb') as f:
+            with open(self.cas.objpath(client_digest), "rb") as f:
                 if os.fstat(f.fileno()).st_size != client_digest.size_bytes:
                     context.set_code(grpc.StatusCode.NOT_FOUND)
                     return
@@ -317,7 +322,7 @@ class _ContentAddressableStorageServicer(remote_execution_pb2_grpc.ContentAddres
             blob_response.digest.size_bytes = digest.size_bytes
             try:
                 objpath = self.cas.objpath(digest)
-                with open(objpath, 'rb') as f:
+                with open(objpath, "rb") as f:
                     if os.fstat(f.fileno()).st_size != digest.size_bytes:
                         blob_response.status.code = code_pb2.NOT_FOUND
                         continue
@@ -437,7 +442,6 @@ class _ReferenceStorageServicer(buildstream_pb2_grpc.ReferenceStorageServicer):
 
 
 class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
-
     def __init__(self, cas, artifactdir, *, update_cas=True):
         super().__init__()
         self.cas = cas
@@ -451,7 +455,7 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
             context.abort(grpc.StatusCode.NOT_FOUND, "Artifact proto not found")
 
         artifact = artifact_pb2.Artifact()
-        with open(artifact_path, 'rb') as f:
+        with open(artifact_path, "rb") as f:
             artifact.ParseFromString(f.read())
 
         # Artifact-only servers will not have blobs on their system,
@@ -489,11 +493,9 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
 
         except FileNotFoundError:
             os.unlink(artifact_path)
-            context.abort(grpc.StatusCode.NOT_FOUND,
-                          "Artifact files incomplete")
+            context.abort(grpc.StatusCode.NOT_FOUND, "Artifact files incomplete")
         except DecodeError:
-            context.abort(grpc.StatusCode.NOT_FOUND,
-                          "Artifact files not valid")
+            context.abort(grpc.StatusCode.NOT_FOUND, "Artifact files not valid")
 
         return artifact
 
@@ -516,7 +518,7 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
         # Add the artifact proto to the cas
         artifact_path = os.path.join(self.artifactdir, request.cache_key)
         os.makedirs(os.path.dirname(artifact_path), exist_ok=True)
-        with utils.save_file_atomic(artifact_path, mode='wb') as f:
+        with utils.save_file_atomic(artifact_path, mode="wb") as f:
             f.write(artifact.SerializeToString())
 
         return artifact
@@ -527,19 +529,18 @@ class _ArtifactServicer(artifact_pb2_grpc.ArtifactServiceServicer):
     def _check_directory(self, name, digest, context):
         try:
             directory = remote_execution_pb2.Directory()
-            with open(self.cas.objpath(digest), 'rb') as f:
+            with open(self.cas.objpath(digest), "rb") as f:
                 directory.ParseFromString(f.read())
         except FileNotFoundError:
-            context.abort(grpc.StatusCode.FAILED_PRECONDITION,
-                          "Artifact {} specified but no files found".format(name))
+            context.abort(grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but no files found".format(name))
         except DecodeError:
-            context.abort(grpc.StatusCode.FAILED_PRECONDITION,
-                          "Artifact {} specified but directory not found".format(name))
+            context.abort(
+                grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but directory not found".format(name)
+            )
 
     def _check_file(self, name, digest, context):
         if not os.path.exists(self.cas.objpath(digest)):
-            context.abort(grpc.StatusCode.FAILED_PRECONDITION,
-                          "Artifact {} specified but not found".format(name))
+            context.abort(grpc.StatusCode.FAILED_PRECONDITION, "Artifact {} specified but not found".format(name))
 
 
 class _BuildStreamCapabilitiesServicer(buildstream_pb2_grpc.CapabilitiesServicer):
@@ -564,8 +565,7 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
         except FileNotFoundError:
             context.abort(grpc.StatusCode.NOT_FOUND, "Source not found")
         except DecodeError:
-            context.abort(grpc.StatusCode.NOT_FOUND,
-                          "Source gives invalid directory")
+            context.abort(grpc.StatusCode.NOT_FOUND, "Source gives invalid directory")
 
         return source_proto
 
@@ -576,7 +576,7 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
     def _get_source(self, cache_key):
         path = os.path.join(self.sourcedir, cache_key)
         source_proto = source_pb2.Source()
-        with open(path, 'r+b') as f:
+        with open(path, "r+b") as f:
             source_proto.ParseFromString(f.read())
             os.utime(path)
             return source_proto
@@ -584,18 +584,18 @@ class _SourceServicer(source_pb2_grpc.SourceServiceServicer):
     def _set_source(self, cache_key, source_proto):
         path = os.path.join(self.sourcedir, cache_key)
         os.makedirs(os.path.dirname(path), exist_ok=True)
-        with utils.save_file_atomic(path, 'w+b') as f:
+        with utils.save_file_atomic(path, "w+b") as f:
             f.write(source_proto.SerializeToString())
 
 
 def _digest_from_download_resource_name(resource_name):
-    parts = resource_name.split('/')
+    parts = resource_name.split("/")
 
     # Accept requests from non-conforming BuildStream 1.1.x clients
     if len(parts) == 2:
-        parts.insert(0, 'blobs')
+        parts.insert(0, "blobs")
 
-    if len(parts) != 3 or parts[0] != 'blobs':
+    if len(parts) != 3 or parts[0] != "blobs":
         return None
 
     try:
@@ -608,15 +608,15 @@ def _digest_from_download_resource_name(resource_name):
 
 
 def _digest_from_upload_resource_name(resource_name):
-    parts = resource_name.split('/')
+    parts = resource_name.split("/")
 
     # Accept requests from non-conforming BuildStream 1.1.x clients
     if len(parts) == 2:
-        parts.insert(0, 'uploads')
+        parts.insert(0, "uploads")
         parts.insert(1, str(uuid.uuid4()))
-        parts.insert(2, 'blobs')
+        parts.insert(2, "blobs")
 
-    if len(parts) < 5 or parts[0] != 'uploads' or parts[2] != 'blobs':
+    if len(parts) < 5 or parts[0] != "uploads" or parts[2] != "blobs":
         return None
 
     try:
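
Both helpers being reformatted here parse ByteStream resource names — "blobs/{hash}/{size}" for downloads and "uploads/{uuid}/blobs/{hash}/{size}" for uploads — and the insert() calls re-create the segments that old BuildStream 1.1.x clients left out. A stripped-down sketch of the download case, returning a (hash, size) tuple instead of a Digest proto:

    def digest_from_download_name(resource_name):
        parts = resource_name.split("/")
        # Legacy 1.1.x clients sent bare "{hash}/{size}"
        if len(parts) == 2:
            parts.insert(0, "blobs")
        if len(parts) != 3 or parts[0] != "blobs":
            return None
        try:
            return parts[1], int(parts[2])
        except ValueError:
            return None

    assert digest_from_download_name("blobs/abc123/42") == ("abc123", 42)
    assert digest_from_download_name("abc123/42") == ("abc123", 42)  # legacy form
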
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 4e1007e..f426f4b 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -46,13 +46,12 @@ from .sandbox import SandboxRemote
 # verbosity levels and basically anything pertaining to the context
 # in which BuildStream was invoked.
 #
-class Context():
-
+class Context:
     def __init__(self, *, use_casd=True):
 
         # Whether we are running as part of a test suite. This is only relevant
         # for developing BuildStream itself.
-        self.is_running_in_test_suite = 'BST_TEST_SUITE' in os.environ
+        self.is_running_in_test_suite = "BST_TEST_SUITE" in os.environ
 
         # Filename indicating which configuration file was used, or None for the defaults
         self.config_origin = None
@@ -216,8 +215,7 @@ class Context():
         # a $XDG_CONFIG_HOME/buildstream.conf file
         #
         if not config:
-            default_config = os.path.join(os.environ['XDG_CONFIG_HOME'],
-                                          'buildstream.conf')
+            default_config = os.path.join(os.environ["XDG_CONFIG_HOME"], "buildstream.conf")
             if os.path.exists(default_config):
                 config = default_config
 
@@ -231,19 +229,32 @@ class Context():
             user_config._composite(defaults)
 
         # Give obsoletion warnings
-        if 'builddir' in defaults:
+        if "builddir" in defaults:
             raise LoadError("builddir is obsolete, use cachedir", LoadErrorReason.INVALID_DATA)
 
-        if 'artifactdir' in defaults:
+        if "artifactdir" in defaults:
             raise LoadError("artifactdir is obsolete", LoadErrorReason.INVALID_DATA)
 
-        defaults.validate_keys([
-            'cachedir', 'sourcedir', 'builddir', 'logdir', 'scheduler', 'build',
-            'artifacts', 'source-caches', 'logging', 'projects', 'cache', 'prompt',
-            'workspacedir', 'remote-execution',
-        ])
-
-        for directory in ['cachedir', 'sourcedir', 'logdir', 'workspacedir']:
+        defaults.validate_keys(
+            [
+                "cachedir",
+                "sourcedir",
+                "builddir",
+                "logdir",
+                "scheduler",
+                "build",
+                "artifacts",
+                "source-caches",
+                "logging",
+                "projects",
+                "cache",
+                "prompt",
+                "workspacedir",
+                "remote-execution",
+            ]
+        )
+
+        for directory in ["cachedir", "sourcedir", "logdir", "workspacedir"]:
             # Allow the ~ tilde expansion and any environment variables in
             # path specification in the config files.
             #
@@ -256,25 +267,23 @@ class Context():
             # Relative paths don't make sense in user configuration. The exception is
             # workspacedir where `.` is useful as it will be combined with the name
             # specified on the command line.
-            if not os.path.isabs(path) and not (directory == 'workspacedir' and path == '.'):
+            if not os.path.isabs(path) and not (directory == "workspacedir" and path == "."):
                 raise LoadError("{} must be an absolute path".format(directory), LoadErrorReason.INVALID_DATA)
 
         # add directories not set by users
-        self.tmpdir = os.path.join(self.cachedir, 'tmp')
-        self.casdir = os.path.join(self.cachedir, 'cas')
-        self.builddir = os.path.join(self.cachedir, 'build')
-        self.artifactdir = os.path.join(self.cachedir, 'artifacts', 'refs')
+        self.tmpdir = os.path.join(self.cachedir, "tmp")
+        self.casdir = os.path.join(self.cachedir, "cas")
+        self.builddir = os.path.join(self.cachedir, "build")
+        self.artifactdir = os.path.join(self.cachedir, "artifacts", "refs")
 
         # Move old artifact cas to cas if it exists and create symlink
-        old_casdir = os.path.join(self.cachedir, 'artifacts', 'cas')
-        if (os.path.exists(old_casdir) and not os.path.islink(old_casdir) and
-                not os.path.exists(self.casdir)):
+        old_casdir = os.path.join(self.cachedir, "artifacts", "cas")
+        if os.path.exists(old_casdir) and not os.path.islink(old_casdir) and not os.path.exists(self.casdir):
             os.rename(old_casdir, self.casdir)
             os.symlink(self.casdir, old_casdir)
 
         # Clean up old extract directories
-        old_extractdirs = [os.path.join(self.cachedir, 'artifacts', 'extract'),
-                           os.path.join(self.cachedir, 'extract')]
+        old_extractdirs = [os.path.join(self.cachedir, "artifacts", "extract"), os.path.join(self.cachedir, "extract")]
         for old_extractdir in old_extractdirs:
             if os.path.isdir(old_extractdir):
                 shutil.rmtree(old_extractdir, ignore_errors=True)
@@ -282,21 +291,22 @@ class Context():
         # Load quota configuration
         # We need to find the first existing directory in the path of our
         # casdir - the casdir may not have been created yet.
-        cache = defaults.get_mapping('cache')
-        cache.validate_keys(['quota', 'pull-buildtrees', 'cache-buildtrees'])
+        cache = defaults.get_mapping("cache")
+        cache.validate_keys(["quota", "pull-buildtrees", "cache-buildtrees"])
 
         cas_volume = self.casdir
         while not os.path.exists(cas_volume):
             cas_volume = os.path.dirname(cas_volume)
 
-        self.config_cache_quota_string = cache.get_str('quota')
+        self.config_cache_quota_string = cache.get_str("quota")
         try:
-            self.config_cache_quota = utils._parse_size(self.config_cache_quota_string,
-                                                        cas_volume)
+            self.config_cache_quota = utils._parse_size(self.config_cache_quota_string, cas_volume)
         except utils.UtilError as e:
-            raise LoadError("{}\nPlease specify the value in bytes or as a % of full disk space.\n"
-                            "\nValid values are, for example: 800M 10G 1T 50%\n"
-                            .format(str(e)), LoadErrorReason.INVALID_DATA) from e
+            raise LoadError(
+                "{}\nPlease specify the value in bytes or as a % of full disk space.\n"
+                "\nValid values are, for example: 800M 10G 1T 50%\n".format(str(e)),
+                LoadErrorReason.INVALID_DATA,
+            ) from e
 
         # Load artifact share configuration
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
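
utils._parse_size, referenced in the quota handling above, accepts absolute sizes such as "800M", "10G" or "1T", or a percentage of the volume that will hold the cache — hence the search for the first existing ancestor of casdir. A rough approximation using shutil.disk_usage (the suffix table and rounding are assumptions, not BuildStream's exact rules):

    import shutil

    _SUFFIXES = {"K": 2**10, "M": 2**20, "G": 2**30, "T": 2**40}

    def parse_size(spec, volume):
        spec = spec.strip().upper()
        if spec.endswith("%"):
            # Percentage of the total size of the disk holding `volume`
            return int(shutil.disk_usage(volume).total * float(spec[:-1]) / 100)
        if spec[-1] in _SUFFIXES:
            return int(float(spec[:-1]) * _SUFFIXES[spec[-1]])
        return int(spec)  # plain byte count

    # parse_size("800M", "/") -> 838860800; parse_size("50%", "/") -> half the disk
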
@@ -305,73 +315,70 @@ class Context():
         self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
 
         # Load remote execution config getting pull-artifact-files from it
-        remote_execution = defaults.get_mapping('remote-execution', default=None)
+        remote_execution = defaults.get_mapping("remote-execution", default=None)
         if remote_execution:
-            self.pull_artifact_files = remote_execution.get_bool('pull-artifact-files', default=True)
+            self.pull_artifact_files = remote_execution.get_bool("pull-artifact-files", default=True)
             # This stops it being used in the remote service setup
-            remote_execution.safe_del('pull-artifact-files')
+            remote_execution.safe_del("pull-artifact-files")
             # Don't pass the remote execution settings if that was the only option
             if remote_execution.keys() == []:
-                del defaults['remote-execution']
+                del defaults["remote-execution"]
         else:
             self.pull_artifact_files = True
 
         self.remote_execution_specs = SandboxRemote.specs_from_config_node(defaults)
 
         # Load pull build trees configuration
-        self.pull_buildtrees = cache.get_bool('pull-buildtrees')
+        self.pull_buildtrees = cache.get_bool("pull-buildtrees")
 
         # Load cache build trees configuration
-        self.cache_buildtrees = cache.get_enum('cache-buildtrees', _CacheBuildTrees)
+        self.cache_buildtrees = cache.get_enum("cache-buildtrees", _CacheBuildTrees)
 
         # Load logging config
-        logging = defaults.get_mapping('logging')
-        logging.validate_keys([
-            'key-length', 'verbose',
-            'error-lines', 'message-lines',
-            'debug', 'element-format', 'message-format'
-        ])
-        self.log_key_length = logging.get_int('key-length')
-        self.log_debug = logging.get_bool('debug')
-        self.log_verbose = logging.get_bool('verbose')
-        self.log_error_lines = logging.get_int('error-lines')
-        self.log_message_lines = logging.get_int('message-lines')
-        self.log_element_format = logging.get_str('element-format')
-        self.log_message_format = logging.get_str('message-format')
+        logging = defaults.get_mapping("logging")
+        logging.validate_keys(
+            ["key-length", "verbose", "error-lines", "message-lines", "debug", "element-format", "message-format"]
+        )
+        self.log_key_length = logging.get_int("key-length")
+        self.log_debug = logging.get_bool("debug")
+        self.log_verbose = logging.get_bool("verbose")
+        self.log_error_lines = logging.get_int("error-lines")
+        self.log_message_lines = logging.get_int("message-lines")
+        self.log_element_format = logging.get_str("element-format")
+        self.log_message_format = logging.get_str("message-format")
 
         # Load scheduler config
-        scheduler = defaults.get_mapping('scheduler')
-        scheduler.validate_keys([
-            'on-error', 'fetchers', 'builders',
-            'pushers', 'network-retries'
-        ])
-        self.sched_error_action = scheduler.get_enum('on-error', _SchedulerErrorAction)
-        self.sched_fetchers = scheduler.get_int('fetchers')
-        self.sched_builders = scheduler.get_int('builders')
-        self.sched_pushers = scheduler.get_int('pushers')
-        self.sched_network_retries = scheduler.get_int('network-retries')
+        scheduler = defaults.get_mapping("scheduler")
+        scheduler.validate_keys(["on-error", "fetchers", "builders", "pushers", "network-retries"])
+        self.sched_error_action = scheduler.get_enum("on-error", _SchedulerErrorAction)
+        self.sched_fetchers = scheduler.get_int("fetchers")
+        self.sched_builders = scheduler.get_int("builders")
+        self.sched_pushers = scheduler.get_int("pushers")
+        self.sched_network_retries = scheduler.get_int("network-retries")
 
         # Load build config
-        build = defaults.get_mapping('build')
-        build.validate_keys(['max-jobs', 'dependencies'])
-        self.build_max_jobs = build.get_int('max-jobs')
-
-        self.build_dependencies = build.get_str('dependencies')
-        if self.build_dependencies not in ['plan', 'all']:
-            provenance = build.get_scalar('dependencies').get_provenance()
-            raise LoadError("{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'."
-                            .format(provenance), LoadErrorReason.INVALID_DATA)
+        build = defaults.get_mapping("build")
+        build.validate_keys(["max-jobs", "dependencies"])
+        self.build_max_jobs = build.get_int("max-jobs")
+
+        self.build_dependencies = build.get_str("dependencies")
+        if self.build_dependencies not in ["plan", "all"]:
+            provenance = build.get_scalar("dependencies").get_provenance()
+            raise LoadError(
+                "{}: Invalid value for 'dependencies'. Choose 'plan' or 'all'.".format(provenance),
+                LoadErrorReason.INVALID_DATA,
+            )
 
         # Load per-projects overrides
-        self._project_overrides = defaults.get_mapping('projects', default={})
+        self._project_overrides = defaults.get_mapping("projects", default={})
 
         # Shallow validation of overrides, parts of buildstream which rely
         # on the overrides are expected to validate elsewhere.
         for overrides_project in self._project_overrides.keys():
             overrides = self._project_overrides.get_mapping(overrides_project)
-            overrides.validate_keys(['artifacts', 'source-caches', 'options',
-                                     'strict', 'default-mirror',
-                                     'remote-execution'])
+            overrides.validate_keys(
+                ["artifacts", "source-caches", "options", "strict", "default-mirror", "remote-execution"]
+            )
 
     @property
     def platform(self):
@@ -474,7 +481,7 @@ class Context():
             # so work out if we should be strict, and then cache the result
             toplevel = self.get_toplevel_project()
             overrides = self.get_overrides(toplevel.name)
-            self._strict_build_plan = overrides.get_bool('strict', default=True)
+            self._strict_build_plan = overrides.get_bool("strict", default=True)
 
         # If it was set by the CLI, it overrides any config
         # Ditto if we've already computed this, then we return the computed
@@ -505,12 +512,12 @@ class Context():
     # preferred locations of things from user configuration
     # files.
     def _init_xdg(self):
-        if not os.environ.get('XDG_CACHE_HOME'):
-            os.environ['XDG_CACHE_HOME'] = os.path.expanduser('~/.cache')
-        if not os.environ.get('XDG_CONFIG_HOME'):
-            os.environ['XDG_CONFIG_HOME'] = os.path.expanduser('~/.config')
-        if not os.environ.get('XDG_DATA_HOME'):
-            os.environ['XDG_DATA_HOME'] = os.path.expanduser('~/.local/share')
+        if not os.environ.get("XDG_CACHE_HOME"):
+            os.environ["XDG_CACHE_HOME"] = os.path.expanduser("~/.cache")
+        if not os.environ.get("XDG_CONFIG_HOME"):
+            os.environ["XDG_CONFIG_HOME"] = os.path.expanduser("~/.config")
+        if not os.environ.get("XDG_DATA_HOME"):
+            os.environ["XDG_DATA_HOME"] = os.path.expanduser("~/.local/share")
 
     def get_cascache(self):
         if self._cascache is None:
@@ -521,10 +528,9 @@ class Context():
             else:
                 log_level = CASLogLevel.WARNING
 
-            self._cascache = CASCache(self.cachedir,
-                                      casd=self.use_casd,
-                                      cache_quota=self.config_cache_quota,
-                                      log_level=log_level)
+            self._cascache = CASCache(
+                self.cachedir, casd=self.use_casd, cache_quota=self.config_cache_quota, log_level=log_level
+            )
         return self._cascache
 
     # prepare_fork():
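
_init_xdg above simply fills in the XDG Base Directory defaults when the variables are unset. os.environ.setdefault would express the same intent more compactly — a sketch, not the project's code, and note the original also replaces variables set to an empty string, which setdefault would leave alone:

    import os

    def init_xdg():
        # XDG Base Directory spec fallbacks
        os.environ.setdefault("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
        os.environ.setdefault("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
        os.environ.setdefault("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
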
diff --git a/src/buildstream/_elementfactory.py b/src/buildstream/_elementfactory.py
index d6591bf..5d219c6 100644
--- a/src/buildstream/_elementfactory.py
+++ b/src/buildstream/_elementfactory.py
@@ -30,14 +30,15 @@ from .element import Element
 #     plugin_origins (list):    Data used to search for external Element plugins
 #
 class ElementFactory(PluginContext):
+    def __init__(self, plugin_base, *, format_versions={}, plugin_origins=None):
 
-    def __init__(self, plugin_base, *,
-                 format_versions={},
-                 plugin_origins=None):
-
-        super().__init__(plugin_base, Element, [_site.element_plugins],
-                         plugin_origins=plugin_origins,
-                         format_versions=format_versions)
+        super().__init__(
+            plugin_base,
+            Element,
+            [_site.element_plugins],
+            plugin_origins=plugin_origins,
+            format_versions=format_versions,
+        )
 
     # create():
     #
diff --git a/src/buildstream/_exceptions.py b/src/buildstream/_exceptions.py
index 947b831..48e249c 100644
--- a/src/buildstream/_exceptions.py
+++ b/src/buildstream/_exceptions.py
@@ -51,7 +51,7 @@ def get_last_exception():
 # Used by regression tests
 #
 def get_last_task_error():
-    if 'BST_TEST_SUITE' not in os.environ:
+    if "BST_TEST_SUITE" not in os.environ:
         raise BstError("Getting the last task error is only supported when running tests")
 
     global _last_task_error_domain
@@ -71,7 +71,7 @@ def get_last_task_error():
 # tests about how things failed in a machine readable way
 #
 def set_last_task_error(domain, reason):
-    if 'BST_TEST_SUITE' in os.environ:
+    if "BST_TEST_SUITE" in os.environ:
         global _last_task_error_domain
         global _last_task_error_reason
 
@@ -107,7 +107,6 @@ class ErrorDomain(Enum):
 # context can then be communicated back to the main process.
 #
 class BstError(Exception):
-
     def __init__(self, message, *, detail=None, domain=None, reason=None, temporary=False):
         global _last_exception
 
@@ -132,7 +131,7 @@ class BstError(Exception):
         self.reason = reason
 
         # Hold on to the last raised exception for testing purposes
-        if 'BST_TEST_SUITE' in os.environ:
+        if "BST_TEST_SUITE" in os.environ:
             _last_exception = self
 
 
@@ -329,7 +328,6 @@ class CASCacheError(CASError):
 # Raised from pipeline operations
 #
 class PipelineError(BstError):
-
     def __init__(self, message, *, detail=None, reason=None):
         super().__init__(message, detail=detail, domain=ErrorDomain.PIPELINE, reason=reason)
 
@@ -339,7 +337,6 @@ class PipelineError(BstError):
 # Raised when a stream operation fails
 #
 class StreamError(BstError):
-
     def __init__(self, message=None, *, detail=None, reason=None, terminated=False):
 
         # The empty string should never appear to a user,
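
The BST_TEST_SUITE checks threaded through this file gate a small test-only side channel: errors stash their domain and reason in module globals that the regression tests read back. The pattern in miniature, with hypothetical names:

    import os

    _last_error = None  # (domain, reason), populated only under test

    def set_last_error(domain, reason):
        global _last_error
        if "BST_TEST_SUITE" in os.environ:
            _last_error = (domain, reason)

    def get_last_error():
        if "BST_TEST_SUITE" not in os.environ:
            raise RuntimeError("only supported when running tests")
        return _last_error
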
diff --git a/src/buildstream/_frontend/app.py b/src/buildstream/_frontend/app.py
index 99e1643..0961085 100644
--- a/src/buildstream/_frontend/app.py
+++ b/src/buildstream/_frontend/app.py
@@ -56,19 +56,18 @@ INDENT = 4
 #    main_options (dict): The main CLI options of the `bst`
 #                         command, before any subcommand
 #
-class App():
-
+class App:
     def __init__(self, main_options):
 
         #
         # Public members
         #
-        self.context = None        # The Context object
-        self.stream = None         # The Stream object
-        self.project = None        # The toplevel Project object
-        self.logger = None         # The LogLine object
-        self.interactive = None    # Whether we are running in interactive mode
-        self.colors = None         # Whether to use colors in logging
+        self.context = None  # The Context object
+        self.stream = None  # The Stream object
+        self.project = None  # The toplevel Project object
+        self.logger = None  # The LogLine object
+        self.interactive = None  # Whether we are running in interactive mode
+        self.colors = None  # Whether to use colors in logging
 
         #
         # Private members
@@ -76,18 +75,18 @@ class App():
         self._session_start = datetime.datetime.now()
         self._session_name = None
         self._main_options = main_options  # Main CLI options, before any command
-        self._status = None                # The Status object
-        self._fail_messages = {}           # Failure messages by unique plugin id
+        self._status = None  # The Status object
+        self._fail_messages = {}  # Failure messages by unique plugin id
         self._interactive_failures = None  # Whether to handle failures interactively
-        self._started = False              # Whether a session has started
-        self._set_project_dir = False      # Whether -C option was used
-        self._state = None                 # Frontend reads this and registers callbacks
+        self._started = False  # Whether a session has started
+        self._set_project_dir = False  # Whether -C option was used
+        self._state = None  # Frontend reads this and registers callbacks
 
         # UI Colors Profiles
-        self._content_profile = Profile(fg='yellow')
-        self._format_profile = Profile(fg='cyan', dim=True)
-        self._success_profile = Profile(fg='green')
-        self._error_profile = Profile(fg='red', dim=True)
+        self._content_profile = Profile(fg="yellow")
+        self._format_profile = Profile(fg="cyan", dim=True)
+        self._success_profile = Profile(fg="green")
+        self._error_profile = Profile(fg="red", dim=True)
         self._detail_profile = Profile(dim=True)
 
         #
@@ -96,31 +95,31 @@ class App():
         is_a_tty = sys.stdout.isatty() and sys.stderr.isatty()
 
         # Enable interactive mode if we're attached to a tty
-        if main_options['no_interactive']:
+        if main_options["no_interactive"]:
             self.interactive = False
         else:
             self.interactive = is_a_tty
 
         # Handle errors interactively if we're in interactive mode
         # and --on-error was not specified on the command line
-        if main_options.get('on_error') is not None:
+        if main_options.get("on_error") is not None:
             self._interactive_failures = False
         else:
             self._interactive_failures = self.interactive
 
         # Use color output if we're attached to a tty, unless
         # otherwise specified on the command line
-        if main_options['colors'] is None:
+        if main_options["colors"] is None:
             self.colors = is_a_tty
-        elif main_options['colors']:
+        elif main_options["colors"]:
             self.colors = True
         else:
             self.colors = False
 
-        if main_options['directory']:
+        if main_options["directory"]:
             self._set_project_dir = True
         else:
-            main_options['directory'] = os.getcwd()
+            main_options["directory"] = os.getcwd()
 
     # create()
     #
@@ -133,9 +132,10 @@ class App():
     #
     @classmethod
     def create(cls, *args, **kwargs):
-        if sys.platform.startswith('linux'):
+        if sys.platform.startswith("linux"):
             # Use an App with linux specific features
             from .linuxapp import LinuxApp  # pylint: disable=cyclic-import
+
             return LinuxApp(*args, **kwargs)
         else:
             # The base App() class is default
@@ -163,8 +163,8 @@ class App():
     #
     @contextmanager
     def initialized(self, *, session_name=None):
-        directory = self._main_options['directory']
-        config = self._main_options['config']
+        directory = self._main_options["directory"]
+        config = self._main_options["config"]
 
         self._session_name = session_name
 
@@ -184,19 +184,19 @@ class App():
             # the command line when used, trumps the config files.
             #
             override_map = {
-                'strict': '_strict_build_plan',
-                'debug': 'log_debug',
-                'verbose': 'log_verbose',
-                'error_lines': 'log_error_lines',
-                'message_lines': 'log_message_lines',
-                'on_error': 'sched_error_action',
-                'fetchers': 'sched_fetchers',
-                'builders': 'sched_builders',
-                'pushers': 'sched_pushers',
-                'max_jobs': 'build_max_jobs',
-                'network_retries': 'sched_network_retries',
-                'pull_buildtrees': 'pull_buildtrees',
-                'cache_buildtrees': 'cache_buildtrees'
+                "strict": "_strict_build_plan",
+                "debug": "log_debug",
+                "verbose": "log_verbose",
+                "error_lines": "log_error_lines",
+                "message_lines": "log_message_lines",
+                "on_error": "sched_error_action",
+                "fetchers": "sched_fetchers",
+                "builders": "sched_builders",
+                "pushers": "sched_pushers",
+                "max_jobs": "build_max_jobs",
+                "network_retries": "sched_network_retries",
+                "pull_buildtrees": "pull_buildtrees",
+                "cache_buildtrees": "cache_buildtrees",
             }
             for cli_option, context_attr in override_map.items():
                 option_value = self._main_options.get(cli_option)
@@ -208,10 +208,13 @@ class App():
                 self._error_exit(e, "Error instantiating platform")
 
             # Create the stream right away, we'll need to pass it around.
-            self.stream = Stream(self.context, self._session_start,
-                                 session_start_callback=self.session_start_cb,
-                                 interrupt_callback=self._interrupt_handler,
-                                 ticker_callback=self._tick)
+            self.stream = Stream(
+                self.context,
+                self._session_start,
+                session_start_callback=self.session_start_cb,
+                interrupt_callback=self._interrupt_handler,
+                ticker_callback=self._tick,
+            )
 
             self._state = self.stream.get_state()
 
@@ -219,13 +222,16 @@ class App():
             self._state.register_task_failed_callback(self._job_failed)
 
             # Create the logger right before setting the message handler
-            self.logger = LogLine(self.context, self._state,
-                                  self._content_profile,
-                                  self._format_profile,
-                                  self._success_profile,
-                                  self._error_profile,
-                                  self._detail_profile,
-                                  indent=INDENT)
+            self.logger = LogLine(
+                self.context,
+                self._state,
+                self._content_profile,
+                self._format_profile,
+                self._success_profile,
+                self._error_profile,
+                self._detail_profile,
+                indent=INDENT,
+            )
 
             # Propagate pipeline feedback to the user
             self.context.messenger.set_message_handler(self._message_handler)
@@ -248,10 +254,15 @@ class App():
             self.stream.init()
 
             # Create our status printer, only available in interactive
-            self._status = Status(self.context, self._state,
-                                  self._content_profile, self._format_profile,
-                                  self._success_profile, self._error_profile,
-                                  self.stream)
+            self._status = Status(
+                self.context,
+                self._state,
+                self._content_profile,
+                self._format_profile,
+                self._success_profile,
+                self._error_profile,
+                self.stream,
+            )
 
             # Mark the beginning of the session
             if session_name:
@@ -261,9 +272,13 @@ class App():
             # Load the Project
             #
             try:
-                self.project = Project(directory, self.context, cli_options=self._main_options['option'],
-                                       default_mirror=self._main_options.get('default_mirror'),
-                                       fetch_subprojects=self.stream.fetch_subprojects)
+                self.project = Project(
+                    directory,
+                    self.context,
+                    cli_options=self._main_options["option"],
+                    default_mirror=self._main_options.get("default_mirror"),
+                    fetch_subprojects=self.stream.fetch_subprojects,
+                )
 
                 self.stream.set_project(self.project)
             except LoadError as e:
@@ -291,7 +306,7 @@ class App():
                     elapsed = self.stream.elapsed_time
 
                     if isinstance(e, StreamError) and e.terminated:  # pylint: disable=no-member
-                        self._message(MessageType.WARN, session_name + ' Terminated', elapsed=elapsed)
+                        self._message(MessageType.WARN, session_name + " Terminated", elapsed=elapsed)
                     else:
                         self._message(MessageType.FAIL, session_name, elapsed=elapsed)
 
@@ -304,8 +319,9 @@ class App():
                 # Exit with the error
                 self._error_exit(e)
             except RecursionError:
-                click.echo("RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.",
-                           err=True)
+                click.echo(
+                    "RecursionError: Dependency depth is too large. Maximum recursion depth exceeded.", err=True
+                )
                 sys.exit(-1)
 
             else:
@@ -331,41 +347,51 @@ class App():
     #    force (bool): Allow overwriting an existing project.conf
     #    target_directory (str): The target directory the project should be initialized in
     #
-    def init_project(self, project_name, format_version=BST_FORMAT_VERSION, element_path='elements',
-                     force=False, target_directory=None):
+    def init_project(
+        self,
+        project_name,
+        format_version=BST_FORMAT_VERSION,
+        element_path="elements",
+        force=False,
+        target_directory=None,
+    ):
         if target_directory:
             directory = os.path.abspath(target_directory)
         else:
-            directory = self._main_options['directory']
+            directory = self._main_options["directory"]
             directory = os.path.abspath(directory)
 
-        project_path = os.path.join(directory, 'project.conf')
+        project_path = os.path.join(directory, "project.conf")
 
         try:
             if self._set_project_dir:
-                raise AppError("Attempted to use -C or --directory with init.",
-                               reason='init-with-set-directory',
-                               detail="Please use 'bst init {}' instead.".format(directory))
+                raise AppError(
+                    "Attempted to use -C or --directory with init.",
+                    reason="init-with-set-directory",
+                    detail="Please use 'bst init {}' instead.".format(directory),
+                )
 
             # Abort if the project.conf already exists, unless `--force` was specified in `bst init`
             if not force and os.path.exists(project_path):
-                raise AppError("A project.conf already exists at: {}".format(project_path),
-                               reason='project-exists')
+                raise AppError("A project.conf already exists at: {}".format(project_path), reason="project-exists")
 
             if project_name:
                 # If project name was specified, user interaction is not desired, just
                 # perform some validation and write the project.conf
-                node._assert_symbol_name(project_name, 'project name')
+                node._assert_symbol_name(project_name, "project name")
                 self._assert_format_version(format_version)
                 self._assert_element_path(element_path)
 
             elif not self.interactive:
-                raise AppError("Cannot initialize a new project without specifying the project name",
-                               reason='unspecified-project-name')
+                raise AppError(
+                    "Cannot initialize a new project without specifying the project name",
+                    reason="unspecified-project-name",
+                )
             else:
                 # Collect the parameters using an interactive session
-                project_name, format_version, element_path = \
-                    self._init_project_interactive(project_name, format_version, element_path)
+                project_name, format_version, element_path = self._init_project_interactive(
+                    project_name, format_version, element_path
+                )
 
             # Create the directory if it doesn't exist
             try:
@@ -378,20 +404,21 @@ class App():
             try:
                 os.makedirs(elements_path, exist_ok=True)
             except IOError as e:
-                raise AppError("Error creating elements sub-directory {}: {}"
-                               .format(elements_path, e)) from e
+                raise AppError("Error creating elements sub-directory {}: {}".format(elements_path, e)) from e
 
             # Don't use ruamel.yaml here, because it doesn't let
             # us programmatically insert comments or whitespace at
             # the toplevel.
             try:
-                with open(project_path, 'w') as f:
-                    f.write("# Unique project name\n" +
-                            "name: {}\n\n".format(project_name) +
-                            "# Required BuildStream format version\n" +
-                            "format-version: {}\n\n".format(format_version) +
-                            "# Subdirectory where elements are stored\n" +
-                            "element-path: {}\n".format(element_path))
+                with open(project_path, "w") as f:
+                    f.write(
+                        "# Unique project name\n"
+                        + "name: {}\n\n".format(project_name)
+                        + "# Required BuildStream format version\n"
+                        + "format-version: {}\n\n".format(format_version)
+                        + "# Subdirectory where elements are stored\n"
+                        + "element-path: {}\n".format(element_path)
+                    )
             except IOError as e:
                 raise AppError("Error writing {}: {}".format(project_path, e)) from e
 
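For reference, the write() call above emits a three-key project.conf; with a placeholder project name it comes out roughly as (format-version takes the BST_FORMAT_VERSION default):

    # Unique project name
    name: my-project

    # Required BuildStream format version
    format-version: <BST_FORMAT_VERSION>

    # Subdirectory where elements are stored
    element-path: elements
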
@@ -419,15 +446,18 @@ class App():
         _, key, dim = element_key
 
         if self.colors:
-            prompt = self._format_profile.fmt('[') + \
-                self._content_profile.fmt(key, dim=dim) + \
-                self._format_profile.fmt('@') + \
-                self._content_profile.fmt(element_name) + \
-                self._format_profile.fmt(':') + \
-                self._content_profile.fmt('$PWD') + \
-                self._format_profile.fmt(']$') + ' '
+            prompt = (
+                self._format_profile.fmt("[")
+                + self._content_profile.fmt(key, dim=dim)
+                + self._format_profile.fmt("@")
+                + self._content_profile.fmt(element_name)
+                + self._format_profile.fmt(":")
+                + self._content_profile.fmt("$PWD")
+                + self._format_profile.fmt("]$")
+                + " "
+            )
         else:
-            prompt = '[{}@{}:${{PWD}}]$ '.format(key, element_name)
+            prompt = "[{}@{}:${{PWD}}]$ ".format(key, element_name)
 
         return prompt
 
@@ -473,8 +503,7 @@ class App():
     #
     def _message(self, message_type, message, **kwargs):
         args = dict(kwargs)
-        self.context.messenger.message(
-            Message(message_type, message, **args))
+        self.context.messenger.message(Message(message_type, message, **args))
 
     # Exception handler
     #
@@ -482,8 +511,7 @@ class App():
 
         # Print the regular BUG message
         formatted = "".join(traceback.format_exception(etype, value, tb))
-        self._message(MessageType.BUG, str(value),
-                      detail=formatted)
+        self._message(MessageType.BUG, str(value), detail=formatted)
 
         # If the scheduler has started, try to terminate all jobs gracefully,
         # otherwise exit immediately.
@@ -498,8 +526,7 @@ class App():
     def _maybe_render_status(self):
 
         # If we're suspended or terminating, then dont render the status area
-        if self._status and self.stream and \
-           not (self.stream.suspended or self.stream.terminated):
+        if self._status and self.stream and not (self.stream.suspended or self.stream.terminated):
             self._status.render()
 
     #
@@ -518,36 +545,40 @@ class App():
         # the currently ongoing tasks. We can also print something more
         # intelligent, like how many tasks remain to complete overall.
         with self._interrupted():
-            click.echo("\nUser interrupted with ^C\n" +
-                       "\n"
-                       "Choose one of the following options:\n" +
-                       "  (c)ontinue  - Continue queueing jobs as much as possible\n" +
-                       "  (q)uit      - Exit after all ongoing jobs complete\n" +
-                       "  (t)erminate - Terminate any ongoing jobs and exit\n" +
-                       "\n" +
-                       "Pressing ^C again will terminate jobs and exit\n",
-                       err=True)
+            click.echo(
+                "\nUser interrupted with ^C\n" + "\n"
+                "Choose one of the following options:\n"
+                + "  (c)ontinue  - Continue queueing jobs as much as possible\n"
+                + "  (q)uit      - Exit after all ongoing jobs complete\n"
+                + "  (t)erminate - Terminate any ongoing jobs and exit\n"
+                + "\n"
+                + "Pressing ^C again will terminate jobs and exit\n",
+                err=True,
+            )
 
             try:
-                choice = click.prompt("Choice:",
-                                      value_proc=_prefix_choice_value_proc(['continue', 'quit', 'terminate']),
-                                      default='continue', err=True)
+                choice = click.prompt(
+                    "Choice:",
+                    value_proc=_prefix_choice_value_proc(["continue", "quit", "terminate"]),
+                    default="continue",
+                    err=True,
+                )
             except (click.Abort, SystemError):
                 # In some cases, the readline buffer underlying the prompt gets corrupted on the second CTRL+C
                 # This throws a SystemError, which doesn't seem to be problematic for the rest of the program
 
                 # Ensure a newline after automatically printed '^C'
                 click.echo("", err=True)
-                choice = 'terminate'
+                choice = "terminate"
 
-            if choice == 'terminate':
+            if choice == "terminate":
                 click.echo("\nTerminating all jobs at user request\n", err=True)
                 self.stream.terminate()
             else:
-                if choice == 'quit':
+                if choice == "quit":
                     click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
                     self.stream.quit()
-                elif choice == 'continue':
+                elif choice == "continue":
                     click.echo("\nContinuing\n", err=True)
 
     def _tick(self):
@@ -577,9 +608,11 @@ class App():
                 # the failure message reaches us ??
                 if not failure:
                     self._status.clear()
-                    click.echo("\n\n\nBUG: Message handling out of sync, " +
-                               "unable to retrieve failure message for element {}\n\n\n\n\n"
-                               .format(full_name), err=True)
+                    click.echo(
+                        "\n\n\nBUG: Message handling out of sync, "
+                        + "unable to retrieve failure message for element {}\n\n\n\n\n".format(full_name),
+                        err=True,
+                    )
                 else:
                     self._handle_failure(element, action_name, failure, full_name)
 
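Both interactive prompts in this file hand click.prompt a value_proc built by _prefix_choice_value_proc, so any unambiguous prefix ("t" for "terminate") is accepted, and bad input makes click re-prompt. That helper is defined outside these hunks; a plausible sketch:

    import click

    def prefix_choice_value_proc(choices):
        def value_proc(user_input):
            matches = [c for c in choices if c.startswith(user_input)]
            if len(matches) == 1:
                return matches[0]  # unambiguous prefix, accept it
            if not matches:
                raise click.UsageError("Expected one of: {}".format(", ".join(choices)))
            raise click.UsageError("Ambiguous input, matches: {}".format(", ".join(matches)))
        return value_proc

    # choice = click.prompt("Choice:", default="continue",
    #                       value_proc=prefix_choice_value_proc(["continue", "quit", "terminate"]))
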
@@ -604,69 +637,72 @@ class App():
         # Interactive mode for element failures
         with self._interrupted():
 
-            summary = ("\n{} failure on element: {}\n".format(failure.action_name, full_name) +
-                       "\n" +
-                       "Choose one of the following options:\n" +
-                       "  (c)ontinue  - Continue queueing jobs as much as possible\n" +
-                       "  (q)uit      - Exit after all ongoing jobs complete\n" +
-                       "  (t)erminate - Terminate any ongoing jobs and exit\n" +
-                       "  (r)etry     - Retry this job\n")
+            summary = (
+                "\n{} failure on element: {}\n".format(failure.action_name, full_name)
+                + "\n"
+                + "Choose one of the following options:\n"
+                + "  (c)ontinue  - Continue queueing jobs as much as possible\n"
+                + "  (q)uit      - Exit after all ongoing jobs complete\n"
+                + "  (t)erminate - Terminate any ongoing jobs and exit\n"
+                + "  (r)etry     - Retry this job\n"
+            )
             if failure.logfile:
                 summary += "  (l)og       - View the full log file\n"
             if failure.sandbox:
                 summary += "  (s)hell     - Drop into a shell in the failed build sandbox\n"
             summary += "\nPressing ^C will terminate jobs and exit\n"
 
-            choices = ['continue', 'quit', 'terminate', 'retry']
+            choices = ["continue", "quit", "terminate", "retry"]
             if failure.logfile:
-                choices += ['log']
+                choices += ["log"]
             if failure.sandbox:
-                choices += ['shell']
+                choices += ["shell"]
 
-            choice = ''
-            while choice not in ['continue', 'quit', 'terminate', 'retry']:
+            choice = ""
+            while choice not in ["continue", "quit", "terminate", "retry"]:
                 click.echo(summary, err=True)
 
-                self._notify("BuildStream failure", "{} on element {}"
-                             .format(failure.action_name, full_name))
+                self._notify("BuildStream failure", "{} on element {}".format(failure.action_name, full_name))
 
                 try:
-                    choice = click.prompt("Choice:", default='continue', err=True,
-                                          value_proc=_prefix_choice_value_proc(choices))
+                    choice = click.prompt(
+                        "Choice:", default="continue", err=True, value_proc=_prefix_choice_value_proc(choices)
+                    )
                 except (click.Abort, SystemError):
                     # In some cases, the readline buffer underlying the prompt gets corrupted on the second CTRL+C
                     # This throws a SystemError, which doesn't seem to be problematic for the rest of the program
 
                     # Ensure a newline after automatically printed '^C'
                     click.echo("", err=True)
-                    choice = 'terminate'
+                    choice = "terminate"
 
                 # Handle choices which you can come back from
                 #
-                if choice == 'shell':
+                if choice == "shell":
                     click.echo("\nDropping into an interactive shell in the failed build sandbox\n", err=True)
                     try:
                         unique_id, element_key = element
                         prompt = self.shell_prompt(full_name, element_key)
-                        self.stream.shell(None, Scope.BUILD, prompt, isolate=True,
-                                          usebuildtree='always', unique_id=unique_id)
+                        self.stream.shell(
+                            None, Scope.BUILD, prompt, isolate=True, usebuildtree="always", unique_id=unique_id
+                        )
                     except BstError as e:
                         click.echo("Error while attempting to create interactive shell: {}".format(e), err=True)
-                elif choice == 'log':
-                    with open(failure.logfile, 'r') as logfile:
+                elif choice == "log":
+                    with open(failure.logfile, "r") as logfile:
                         content = logfile.read()
                         click.echo_via_pager(content)
 
-            if choice == 'terminate':
+            if choice == "terminate":
                 click.echo("\nTerminating all jobs\n", err=True)
                 self.stream.terminate()
             else:
-                if choice == 'quit':
+                if choice == "quit":
                     click.echo("\nCompleting ongoing tasks before quitting\n", err=True)
                     self.stream.quit()
-                elif choice == 'continue':
+                elif choice == "continue":
                     click.echo("\nContinuing with other non failing elements\n", err=True)
-                elif choice == 'retry':
+                elif choice == "retry":
                     click.echo("\nRetrying failed job\n", err=True)
                     unique_id = element[0]
                     self.stream._failure_retry(action_name, unique_id)
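
The failure handler above is a small click prompt loop: keep asking until a
terminal choice is reached, and treat Ctrl+C (click.Abort) as "terminate".
A minimal standalone sketch of that loop, with illustrative choices rather
than the exact BuildStream flow:

    import click

    def ask_failure_action():
        choices = ["continue", "quit", "terminate", "retry"]
        choice = ""
        while choice not in choices:
            try:
                # err=True keeps the prompt on stderr, as above
                choice = click.prompt("Choice:", default="continue", err=True)
            except (click.Abort, SystemError):
                click.echo("", err=True)  # newline after the echoed '^C'
                choice = "terminate"
        return choice
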
@@ -678,17 +714,14 @@ class App():
     def session_start_cb(self):
         self._started = True
         if self._session_name:
-            self.logger.print_heading(self.project,
-                                      self.stream,
-                                      log_file=self._main_options['log_file'])
+            self.logger.print_heading(self.project, self.stream, log_file=self._main_options["log_file"])
 
     #
     # Print a summary of the queues
     #
     def _print_summary(self):
         click.echo("", err=True)
-        self.logger.print_summary(self.stream,
-                                  self._main_options['log_file'])
+        self.logger.print_summary(self.stream, self._main_options["log_file"])
 
     # _error_exit()
     #
@@ -720,7 +753,7 @@ class App():
         click.echo(main_error, err=True)
         if error.detail:
             indent = " " * INDENT
-            detail = '\n' + indent + indent.join(error.detail.splitlines(True))
+            detail = "\n" + indent + indent.join(error.detail.splitlines(True))
             click.echo(detail, err=True)
 
         sys.exit(-1)
@@ -753,8 +786,8 @@ class App():
         self._maybe_render_status()
 
         # Additionally log to a file
-        if self._main_options['log_file']:
-            click.echo(text, file=self._main_options['log_file'], color=False, nl=False)
+        if self._main_options["log_file"]:
+            click.echo(text, file=self._main_options["log_file"], color=False, nl=False)
 
     @contextmanager
     def _interrupted(self):
@@ -768,25 +801,26 @@ class App():
     # Some validation routines for project initialization
     #
     def _assert_format_version(self, format_version):
-        message = "The version must be supported by this " + \
-                  "version of buildstream (0 - {})\n".format(BST_FORMAT_VERSION)
+        message = "The version must be supported by this " + "version of buildstream (0 - {})\n".format(
+            BST_FORMAT_VERSION
+        )
 
         # Validate that it is an integer
         try:
             number = int(format_version)
         except ValueError as e:
-            raise AppError(message, reason='invalid-format-version') from e
+            raise AppError(message, reason="invalid-format-version") from e
 
         # Validate that the specified version is supported
         if number < 0 or number > BST_FORMAT_VERSION:
-            raise AppError(message, reason='invalid-format-version')
+            raise AppError(message, reason="invalid-format-version")
 
     def _assert_element_path(self, element_path):
         message = "The element path cannot be an absolute path or contain any '..' components\n"
 
         # Validate the path is not absolute
         if os.path.isabs(element_path):
-            raise AppError(message, reason='invalid-element-path')
+            raise AppError(message, reason="invalid-element-path")
 
         # Validate that the path does not contain any '..' components
         path = element_path
@@ -794,8 +828,8 @@ class App():
             split = os.path.split(path)
             path = split[0]
             basename = split[1]
-            if basename == '..':
-                raise AppError(message, reason='invalid-element-path')
+            if basename == "..":
+                raise AppError(message, reason="invalid-element-path")
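
The loop above peels one component off the path per iteration with
os.path.split, rejecting any ".." it finds. The same technique as a
self-contained sketch (the helper name is illustrative, not BuildStream API):

    import os

    def contains_updir(path):
        # os.path.split("a/b/c") -> ("a/b", "c"): walk components rightwards
        while True:
            head, tail = os.path.split(path)
            if tail == "..":
                return True
            if head == path:  # no progress left: root or empty head
                return False
            path = head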
 
     # _init_project_interactive()
     #
@@ -811,11 +845,10 @@ class App():
     #    format_version (int): The user selected format version
     #    element_path (str): The user selected element path
     #
-    def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path='elements'):
-
+    def _init_project_interactive(self, project_name, format_version=BST_FORMAT_VERSION, element_path="elements"):
         def project_name_proc(user_input):
             try:
-                node._assert_symbol_name(user_input, 'project name')
+                node._assert_symbol_name(user_input, "project name")
             except LoadError as e:
                 message = "{}\n\n{}\n".format(e, e.detail)
                 raise UsageError(message) from e
@@ -835,63 +868,101 @@ class App():
                 raise UsageError(str(e)) from e
             return user_input
 
-        w = TextWrapper(initial_indent='  ', subsequent_indent='  ', width=79)
+        w = TextWrapper(initial_indent="  ", subsequent_indent="  ", width=79)
 
         # Collect project name
         click.echo("", err=True)
         click.echo(self._content_profile.fmt("Choose a unique name for your project"), err=True)
         click.echo(self._format_profile.fmt("-------------------------------------"), err=True)
         click.echo("", err=True)
-        click.echo(self._detail_profile.fmt(
-            w.fill("The project name is a unique symbol for your project and will be used "
-                   "to distinguish your project from others in user preferences, namspaceing "
-                   "of your project's artifacts in shared artifact caches, and in any case where "
-                   "BuildStream needs to distinguish between multiple projects.")), err=True)
+        click.echo(
+            self._detail_profile.fmt(
+                w.fill(
+                    "The project name is a unique symbol for your project and will be used "
+                    "to distinguish your project from others in user preferences, namspaceing "
+                    "of your project's artifacts in shared artifact caches, and in any case where "
+                    "BuildStream needs to distinguish between multiple projects."
+                )
+            ),
+            err=True,
+        )
         click.echo("", err=True)
-        click.echo(self._detail_profile.fmt(
-            w.fill("The project name must contain only alphanumeric characters, "
-                   "may not start with a digit, and may contain dashes or underscores.")), err=True)
+        click.echo(
+            self._detail_profile.fmt(
+                w.fill(
+                    "The project name must contain only alphanumeric characters, "
+                    "may not start with a digit, and may contain dashes or underscores."
+                )
+            ),
+            err=True,
+        )
         click.echo("", err=True)
-        project_name = click.prompt(self._content_profile.fmt("Project name"),
-                                    value_proc=project_name_proc, err=True)
+        project_name = click.prompt(self._content_profile.fmt("Project name"), value_proc=project_name_proc, err=True)
         click.echo("", err=True)
 
         # Collect format version
         click.echo(self._content_profile.fmt("Select the minimum required format version for your project"), err=True)
         click.echo(self._format_profile.fmt("-----------------------------------------------------------"), err=True)
         click.echo("", err=True)
-        click.echo(self._detail_profile.fmt(
-            w.fill("The format version is used to provide users who build your project "
-                   "with a helpful error message in the case that they do not have a recent "
-                   "enough version of BuildStream supporting all the features which your "
-                   "project might use.")), err=True)
+        click.echo(
+            self._detail_profile.fmt(
+                w.fill(
+                    "The format version is used to provide users who build your project "
+                    "with a helpful error message in the case that they do not have a recent "
+                    "enough version of BuildStream supporting all the features which your "
+                    "project might use."
+                )
+            ),
+            err=True,
+        )
         click.echo("", err=True)
-        click.echo(self._detail_profile.fmt(
-            w.fill("The lowest version allowed is 0, the currently installed version of BuildStream "
-                   "supports up to format version {}.".format(BST_FORMAT_VERSION))), err=True)
+        click.echo(
+            self._detail_profile.fmt(
+                w.fill(
+                    "The lowest version allowed is 0, the currently installed version of BuildStream "
+                    "supports up to format version {}.".format(BST_FORMAT_VERSION)
+                )
+            ),
+            err=True,
+        )
 
         click.echo("", err=True)
-        format_version = click.prompt(self._content_profile.fmt("Format version"),
-                                      value_proc=format_version_proc,
-                                      default=format_version, err=True)
+        format_version = click.prompt(
+            self._content_profile.fmt("Format version"),
+            value_proc=format_version_proc,
+            default=format_version,
+            err=True,
+        )
         click.echo("", err=True)
 
         # Collect element path
         click.echo(self._content_profile.fmt("Select the element path"), err=True)
         click.echo(self._format_profile.fmt("-----------------------"), err=True)
         click.echo("", err=True)
-        click.echo(self._detail_profile.fmt(
-            w.fill("The element path is a project subdirectory where element .bst files are stored "
-                   "within your project.")), err=True)
+        click.echo(
+            self._detail_profile.fmt(
+                w.fill(
+                    "The element path is a project subdirectory where element .bst files are stored "
+                    "within your project."
+                )
+            ),
+            err=True,
+        )
         click.echo("", err=True)
-        click.echo(self._detail_profile.fmt(
-            w.fill("Elements will be displayed in logs as filenames relative to "
-                   "the element path, and similarly, dependencies must be expressed as filenames "
-                   "relative to the element path.")), err=True)
+        click.echo(
+            self._detail_profile.fmt(
+                w.fill(
+                    "Elements will be displayed in logs as filenames relative to "
+                    "the element path, and similarly, dependencies must be expressed as filenames "
+                    "relative to the element path."
+                )
+            ),
+            err=True,
+        )
         click.echo("", err=True)
-        element_path = click.prompt(self._content_profile.fmt("Element path"),
-                                    value_proc=element_path_proc,
-                                    default=element_path, err=True)
+        element_path = click.prompt(
+            self._content_profile.fmt("Element path"), value_proc=element_path_proc, default=element_path, err=True
+        )
 
         return (project_name, format_version, element_path)
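
Throughout _init_project_interactive the explanatory paragraphs are shaped
with textwrap.TextWrapper before being passed to click.echo. In isolation:

    from textwrap import TextWrapper

    w = TextWrapper(initial_indent="  ", subsequent_indent="  ", width=79)
    print(w.fill("A long explanatory paragraph gets wrapped to 79 columns "
                 "and indented by two spaces on every line."))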
 
@@ -909,7 +980,6 @@ class App():
 # ask for a new input.
 #
 def _prefix_choice_value_proc(choices):
-
     def value_proc(user_input):
         remaining_candidate = [choice for choice in choices if choice.startswith(user_input)]
 
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index 5c02935..935a492 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -17,8 +17,8 @@ from ..utils import _get_compression, UtilError
 #              Helper classes and methods for Click              #
 ##################################################################
 
-class FastEnumType(click.Choice):
 
+class FastEnumType(click.Choice):
     def __init__(self, enum):
         self._enum = enum
         super().__init__(enum.values())
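
FastEnumType subclasses click.Choice so that option values parse directly
into BuildStream's FastEnum members. The same pattern with a standard
enum.Enum, sketched with illustrative names:

    import enum
    import click

    class EnumChoice(click.Choice):
        def __init__(self, enum_cls):
            self._enum_cls = enum_cls
            super().__init__([e.value for e in enum_cls])

        def convert(self, value, param, ctx):
            # Let Choice validate the string, then map it to the member
            return self._enum_cls(super().convert(value, param, ctx))

An option declared with type=EnumChoice(SomeEnum) then hands its callback an
enum member instead of a raw string.
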
@@ -45,7 +45,7 @@ class FastEnumType(click.Choice):
 #
 def search_command(args, *, context=None):
     if context is None:
-        context = cli.make_context('bst', args, resilient_parsing=True)
+        context = cli.make_context("bst", args, resilient_parsing=True)
 
     # Loop into the deepest command
     command = cli
@@ -54,9 +54,7 @@ def search_command(args, *, context=None):
         command = command_ctx.command.get_command(command_ctx, cmd)
         if command is None:
             return None
-        command_ctx = command.make_context(command.name, [command.name],
-                                           parent=command_ctx,
-                                           resilient_parsing=True)
+        command_ctx = command.make_context(command.name, [command.name], parent=command_ctx, resilient_parsing=True)
 
     return command_ctx
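
search_command() never executes anything while it walks the command tree:
resilient_parsing=True makes click suppress errors and skip callbacks, so
the contexts are throwaway. A toy demonstration of the same walk:

    import click

    @click.group()
    def tool():
        pass

    @tool.group()
    def sub():
        pass

    @sub.command()
    def leaf():
        pass

    ctx = tool.make_context("tool", ["sub", "leaf"], resilient_parsing=True)
    group = tool.get_command(ctx, "sub")
    print(group.get_command(ctx, "leaf").name)  # -> "leaf"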
 
@@ -65,8 +63,11 @@ def search_command(args, *, context=None):
 def complete_commands(cmd, args, incomplete):
     command_ctx = search_command(args[1:])
     if command_ctx and command_ctx.command and isinstance(command_ctx.command, click.MultiCommand):
-        return [subcommand + " " for subcommand in command_ctx.command.list_commands(command_ctx)
-                if not command_ctx.command.get_command(command_ctx, subcommand).hidden]
+        return [
+            subcommand + " "
+            for subcommand in command_ctx.command.list_commands(command_ctx)
+            if not command_ctx.command.get_command(command_ctx, subcommand).hidden
+        ]
 
     return []
 
@@ -80,18 +81,19 @@ def complete_target(args, incomplete):
     """
 
     from .. import utils
-    project_conf = 'project.conf'
+
+    project_conf = "project.conf"
 
     # First resolve the directory, in case there is an
     # active --directory/-C option
     #
-    base_directory = '.'
+    base_directory = "."
     idx = -1
     try:
-        idx = args.index('-C')
+        idx = args.index("-C")
     except ValueError:
         try:
-            idx = args.index('--directory')
+            idx = args.index("--directory")
         except ValueError:
             pass
 
@@ -116,7 +118,7 @@ def complete_target(args, incomplete):
             return []
 
     # The project is not required to have an element-path
-    element_directory = project.get_str('element-path', default='')
+    element_directory = project.get_str("element-path", default="")
 
     # If a project was loaded, use its element-path to
     # adjust our completion's base directory
@@ -132,19 +134,20 @@ def complete_target(args, incomplete):
 
 def complete_artifact(orig_args, args, incomplete):
     from .._context import Context
+
     with Context(use_casd=False) as ctx:
 
         config = None
         if orig_args:
             for i, arg in enumerate(orig_args):
-                if arg in ('-c', '--config'):
+                if arg in ("-c", "--config"):
                     try:
                         config = orig_args[i + 1]
                     except IndexError:
                         pass
         if args:
             for i, arg in enumerate(args):
-                if arg in ('-c', '--config'):
+                if arg in ("-c", "--config"):
                     try:
                         config = args[i + 1]
                     except IndexError:
@@ -167,38 +170,40 @@ def override_completions(orig_args, cmd, cmd_param, args, incomplete):
     :return: all the possible user-specified completions for the param
     """
 
-    if cmd.name == 'help':
+    if cmd.name == "help":
         return complete_commands(cmd, args, incomplete)
 
     # We can't easily extend click's data structures without
     # modifying click itself, so just do some weak special casing
     # right here and select which parameters we want to handle specially.
     if isinstance(cmd_param.type, click.Path):
-        if (cmd_param.name == 'elements' or
-                cmd_param.name == 'element' or
-                cmd_param.name == 'except_' or
-                cmd_param.opts == ['--track'] or
-                cmd_param.opts == ['--track-except']):
+        if (
+            cmd_param.name == "elements"
+            or cmd_param.name == "element"
+            or cmd_param.name == "except_"
+            or cmd_param.opts == ["--track"]
+            or cmd_param.opts == ["--track-except"]
+        ):
             return complete_target(args, incomplete)
-        if cmd_param.name == 'artifacts' or cmd_param.name == 'target':
+        if cmd_param.name == "artifacts" or cmd_param.name == "target":
             return complete_artifact(orig_args, args, incomplete)
 
     raise CompleteUnhandled()
 
 
 def validate_output_streams():
-    if sys.platform == 'win32':
+    if sys.platform == "win32":
         # Windows does not support 'fcntl', the module is unavailable there as
         # of Python 3.7, therefore early-out here.
         return
 
     import fcntl
+
     for stream in (sys.stdout, sys.stderr):
         fileno = stream.fileno()
         flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
         if flags & os.O_NONBLOCK:
-            click.echo("{} is currently set to O_NONBLOCK, try opening a new shell"
-                       .format(stream.name), err=True)
+            click.echo("{} is currently set to O_NONBLOCK, try opening a new shell".format(stream.name), err=True)
             sys.exit(-1)
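
validate_output_streams() only detects the O_NONBLOCK condition and asks the
user for a fresh shell. For reference, clearing the flag instead would use
the same fcntl calls (a sketch of the alternative, not what BuildStream does):

    import fcntl
    import os
    import sys

    fileno = sys.stdout.fileno()
    flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
    if flags & os.O_NONBLOCK:
        fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)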
 
 
@@ -237,8 +242,7 @@ def handle_bst_force_start_method_env():
             sys.exit(-1)
 
 
-def override_main(self, args=None, prog_name=None, complete_var=None,
-                  standalone_mode=True, **extra):
+def override_main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra):
 
     # Hook for the Bash completion.  This only activates if the Bash
     # completion is actually enabled, otherwise this is quite a fast
@@ -250,7 +254,7 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
         #
         # The below is a quicker exit path for the sake
         # of making completions respond faster.
-        if 'BST_TEST_SUITE' not in os.environ:
+        if "BST_TEST_SUITE" not in os.environ:
             sys.stdout.flush()
             sys.stderr.flush()
             os._exit(0)
@@ -269,14 +273,13 @@ def override_main(self, args=None, prog_name=None, complete_var=None,
     # case of testing, our tests precede our entrypoint, so we do our best.
     handle_bst_force_start_method_env()
 
-    original_main(self, args=args, prog_name=prog_name, complete_var=None,
-                  standalone_mode=standalone_mode, **extra)
+    original_main(self, args=args, prog_name=prog_name, complete_var=None, standalone_mode=standalone_mode, **extra)
 
 
 original_main = click.BaseCommand.main
 # Disable type checking since mypy doesn't support assigning to a method.
 # See https://github.com/python/mypy/issues/2427.
-click.BaseCommand.main = override_main      # type: ignore
+click.BaseCommand.main = override_main  # type: ignore
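
The assignment above is the classic wrap-and-replace monkeypatch: keep a
module-level reference to the original method, define a wrapper that
delegates to it, and reassign on the class. In miniature:

    import click

    original_main = click.BaseCommand.main

    def patched_main(self, *args, **kwargs):
        # ... setup (completions, start-method handling) before delegating ...
        return original_main(self, *args, **kwargs)

    click.BaseCommand.main = patched_main  # type: ignore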
 
 
 ##################################################################
@@ -287,58 +290,78 @@ def print_version(ctx, param, value):
         return
 
     from .. import __version__
+
     click.echo(__version__)
     ctx.exit()
 
 
-@click.group(context_settings=dict(help_option_names=['-h', '--help']))
-@click.option('--version', is_flag=True, callback=print_version,
-              expose_value=False, is_eager=True)
-@click.option('--config', '-c',
-              type=click.Path(exists=True, dir_okay=False, readable=True),
-              help="Configuration file to use")
-@click.option('--directory', '-C', default=None,  # Set to os.getcwd() later.
-              type=click.Path(file_okay=False, readable=True),
-              help="Project directory (default: current directory)")
-@click.option('--on-error', default=None,
-              type=FastEnumType(_SchedulerErrorAction),
-              help="What to do when an error is encountered")
-@click.option('--fetchers', type=click.INT, default=None,
-              help="Maximum simultaneous download tasks")
-@click.option('--builders', type=click.INT, default=None,
-              help="Maximum simultaneous build tasks")
-@click.option('--pushers', type=click.INT, default=None,
-              help="Maximum simultaneous upload tasks")
-@click.option('--max-jobs', type=click.INT, default=None,
-              help="Number of parallel jobs allowed for a given build task")
-@click.option('--network-retries', type=click.INT, default=None,
-              help="Maximum retries for network tasks")
-@click.option('--no-interactive', is_flag=True,
-              help="Force non interactive mode, otherwise this is automatically decided")
-@click.option('--verbose/--no-verbose', default=None,
-              help="Be extra verbose")
-@click.option('--debug/--no-debug', default=None,
-              help="Print debugging output")
-@click.option('--error-lines', type=click.INT, default=None,
-              help="Maximum number of lines to show from a task log")
-@click.option('--message-lines', type=click.INT, default=None,
-              help="Maximum number of lines to show in a detailed message")
-@click.option('--log-file',
-              type=click.File(mode='w', encoding='UTF-8'),
-              help="A file to store the main log (allows storing the main log while in interactive mode)")
-@click.option('--colors/--no-colors', default=None,
-              help="Force enable/disable ANSI color codes in output")
-@click.option('--strict/--no-strict', default=None, is_flag=True,
-              help="Elements must be rebuilt when their dependencies have changed")
-@click.option('--option', '-o', type=click.Tuple([str, str]), multiple=True, metavar='OPTION VALUE',
-              help="Specify a project option")
-@click.option('--default-mirror', default=None,
-              help="The mirror to fetch from first, before attempting other mirrors")
-@click.option('--pull-buildtrees', is_flag=True, default=None,
-              help="Include an element's build tree when pulling remote element artifacts")
-@click.option('--cache-buildtrees', default=None,
-              type=FastEnumType(_CacheBuildTrees),
-              help="Cache artifact build tree content on creation")
+@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
+@click.option("--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True)
+@click.option(
+    "--config", "-c", type=click.Path(exists=True, dir_okay=False, readable=True), help="Configuration file to use"
+)
+@click.option(
+    "--directory",
+    "-C",
+    default=None,  # Set to os.getcwd() later.
+    type=click.Path(file_okay=False, readable=True),
+    help="Project directory (default: current directory)",
+)
+@click.option(
+    "--on-error",
+    default=None,
+    type=FastEnumType(_SchedulerErrorAction),
+    help="What to do when an error is encountered",
+)
+@click.option("--fetchers", type=click.INT, default=None, help="Maximum simultaneous download tasks")
+@click.option("--builders", type=click.INT, default=None, help="Maximum simultaneous build tasks")
+@click.option("--pushers", type=click.INT, default=None, help="Maximum simultaneous upload tasks")
+@click.option(
+    "--max-jobs", type=click.INT, default=None, help="Number of parallel jobs allowed for a given build task"
+)
+@click.option("--network-retries", type=click.INT, default=None, help="Maximum retries for network tasks")
+@click.option(
+    "--no-interactive", is_flag=True, help="Force non interactive mode, otherwise this is automatically decided"
+)
+@click.option("--verbose/--no-verbose", default=None, help="Be extra verbose")
+@click.option("--debug/--no-debug", default=None, help="Print debugging output")
+@click.option("--error-lines", type=click.INT, default=None, help="Maximum number of lines to show from a task log")
+@click.option(
+    "--message-lines", type=click.INT, default=None, help="Maximum number of lines to show in a detailed message"
+)
+@click.option(
+    "--log-file",
+    type=click.File(mode="w", encoding="UTF-8"),
+    help="A file to store the main log (allows storing the main log while in interactive mode)",
+)
+@click.option("--colors/--no-colors", default=None, help="Force enable/disable ANSI color codes in output")
+@click.option(
+    "--strict/--no-strict",
+    default=None,
+    is_flag=True,
+    help="Elements must be rebuilt when their dependencies have changed",
+)
+@click.option(
+    "--option",
+    "-o",
+    type=click.Tuple([str, str]),
+    multiple=True,
+    metavar="OPTION VALUE",
+    help="Specify a project option",
+)
+@click.option("--default-mirror", default=None, help="The mirror to fetch from first, before attempting other mirrors")
+@click.option(
+    "--pull-buildtrees",
+    is_flag=True,
+    default=None,
+    help="Include an element's build tree when pulling remote element artifacts",
+)
+@click.option(
+    "--cache-buildtrees",
+    default=None,
+    type=FastEnumType(_CacheBuildTrees),
+    help="Cache artifact build tree content on creation",
+)
 @click.pass_context
 def cli(context, **kwargs):
     """Build and manipulate BuildStream projects
@@ -360,17 +383,15 @@ def cli(context, **kwargs):
 ##################################################################
 #                           Help Command                         #
 ##################################################################
-@cli.command(name="help", short_help="Print usage information",
-             context_settings={"help_option_names": []})
-@click.argument("command", nargs=-1, metavar='COMMAND')
+@cli.command(name="help", short_help="Print usage information", context_settings={"help_option_names": []})
+@click.argument("command", nargs=-1, metavar="COMMAND")
 @click.pass_context
 def help_command(ctx, command):
     """Print usage information about a given command
     """
     command_ctx = search_command(command, context=ctx.parent)
     if not command_ctx:
-        click.echo("Not a valid command: '{} {}'"
-                   .format(ctx.parent.info_name, " ".join(command)), err=True)
+        click.echo("Not a valid command: '{} {}'".format(ctx.parent.info_name, " ".join(command)), err=True)
         sys.exit(-1)
 
     click.echo(command_ctx.command.get_help(command_ctx), err=True)
@@ -380,24 +401,32 @@ def help_command(ctx, command):
         detail = " "
         if command:
             detail = " {} ".format(" ".join(command))
-        click.echo("\nFor usage on a specific command: {} help{}COMMAND"
-                   .format(ctx.parent.info_name, detail), err=True)
+        click.echo(
+            "\nFor usage on a specific command: {} help{}COMMAND".format(ctx.parent.info_name, detail), err=True
+        )
 
 
 ##################################################################
 #                           Init Command                         #
 ##################################################################
 @cli.command(short_help="Initialize a new BuildStream project")
-@click.option('--project-name', type=click.STRING,
-              help="The project name to use")
-@click.option('--format-version', type=click.INT, default=BST_FORMAT_VERSION, show_default=True,
-              help="The required format version")
-@click.option('--element-path', type=click.Path(), default="elements", show_default=True,
-              help="The subdirectory to store elements in")
-@click.option('--force', '-f', is_flag=True,
-              help="Allow overwriting an existing project.conf")
-@click.argument('target-directory', nargs=1, required=False,
-                type=click.Path(file_okay=False, writable=True))
+@click.option("--project-name", type=click.STRING, help="The project name to use")
+@click.option(
+    "--format-version",
+    type=click.INT,
+    default=BST_FORMAT_VERSION,
+    show_default=True,
+    help="The required format version",
+)
+@click.option(
+    "--element-path",
+    type=click.Path(),
+    default="elements",
+    show_default=True,
+    help="The subdirectory to store elements in",
+)
+@click.option("--force", "-f", is_flag=True, help="Allow overwriting an existing project.conf")
+@click.argument("target-directory", nargs=1, required=False, type=click.Path(file_okay=False, writable=True))
 @click.pass_obj
 def init(app, project_name, format_version, element_path, force, target_directory):
     """Initialize a new BuildStream project
@@ -415,13 +444,11 @@ def init(app, project_name, format_version, element_path, force, target_director
 #                          Build Command                         #
 ##################################################################
 @cli.command(short_help="Build elements in a pipeline")
-@click.option('--deps', '-d', default=None,
-              type=click.Choice(['plan', 'all']),
-              help='The dependencies to build')
-@click.option('--remote', '-r', default=None,
-              help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option("--deps", "-d", default=None, type=click.Choice(["plan", "all"]), help="The dependencies to build")
+@click.option(
+    "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def build(app, elements, deps, remote):
     """Build elements in a pipeline
@@ -450,30 +477,41 @@ def build(app, elements, deps, remote):
             # Junction elements cannot be built, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.build(elements,
-                         selection=deps,
-                         ignore_junction_targets=ignore_junction_targets,
-                         remote=remote)
+        app.stream.build(elements, selection=deps, ignore_junction_targets=ignore_junction_targets, remote=remote)
 
 
 ##################################################################
 #                           Show Command                         #
 ##################################################################
 @cli.command(short_help="Show elements in the pipeline")
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Except certain dependencies")
-@click.option('--deps', '-d', default='all', show_default=True,
-              type=click.Choice(['none', 'plan', 'run', 'build', 'all']),
-              help='The dependencies to show')
-@click.option('--order', default="stage", show_default=True,
-              type=click.Choice(['stage', 'alpha']),
-              help='Staging or alphabetic ordering of dependencies')
-@click.option('--format', '-f', 'format_', metavar='FORMAT', default=None,
-              type=click.STRING,
-              help='Format string for each element')
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies"
+)
+@click.option(
+    "--deps",
+    "-d",
+    default="all",
+    show_default=True,
+    type=click.Choice(["none", "plan", "run", "build", "all"]),
+    help="The dependencies to show",
+)
+@click.option(
+    "--order",
+    default="stage",
+    show_default=True,
+    type=click.Choice(["stage", "alpha"]),
+    help="Staging or alphabetic ordering of dependencies",
+)
+@click.option(
+    "--format",
+    "-f",
+    "format_",
+    metavar="FORMAT",
+    default=None,
+    type=click.STRING,
+    help="Format string for each element",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def show(app, elements, deps, except_, order, format_):
     """Show elements in the pipeline
@@ -536,9 +574,7 @@ def show(app, elements, deps, except_, order, format_):
         if not elements:
             elements = app.project.get_default_targets()
 
-        dependencies = app.stream.load_selection(elements,
-                                                 selection=deps,
-                                                 except_targets=except_)
+        dependencies = app.stream.load_selection(elements, selection=deps, except_targets=except_)
 
         if order == "alpha":
             dependencies = sorted(dependencies)
@@ -554,25 +590,34 @@ def show(app, elements, deps, except_, order, format_):
 #                          Shell Command                         #
 ##################################################################
 @cli.command(short_help="Shell into an element's sandbox environment")
-@click.option('--build', '-b', 'build_', is_flag=True,
-              help='Stage dependencies and sources to build')
-@click.option('--sysroot', '-s', default=None,
-              type=click.Path(exists=True, file_okay=False, readable=True),
-              help="An existing sysroot")
-@click.option('--mount', type=click.Tuple([click.Path(exists=True), str]), multiple=True,
-              metavar='HOSTPATH PATH',
-              help="Mount a file or directory into the sandbox")
-@click.option('--isolate', is_flag=True,
-              help='Create an isolated build sandbox')
-@click.option('--use-buildtree', '-t', 'cli_buildtree', type=click.Choice(['ask', 'try', 'always', 'never']),
-              default='ask', show_default=True,
-              help=('Use a buildtree. If `always` is set, will always fail to '
-                    'build if a buildtree is not available.'))
-@click.option('--pull', 'pull_', is_flag=True,
-              help='Attempt to pull missing or incomplete artifacts')
-@click.argument('element', required=False,
-                type=click.Path(readable=False))
-@click.argument('command', type=click.STRING, nargs=-1)
+@click.option("--build", "-b", "build_", is_flag=True, help="Stage dependencies and sources to build")
+@click.option(
+    "--sysroot",
+    "-s",
+    default=None,
+    type=click.Path(exists=True, file_okay=False, readable=True),
+    help="An existing sysroot",
+)
+@click.option(
+    "--mount",
+    type=click.Tuple([click.Path(exists=True), str]),
+    multiple=True,
+    metavar="HOSTPATH PATH",
+    help="Mount a file or directory into the sandbox",
+)
+@click.option("--isolate", is_flag=True, help="Create an isolated build sandbox")
+@click.option(
+    "--use-buildtree",
+    "-t",
+    "cli_buildtree",
+    type=click.Choice(["ask", "try", "always", "never"]),
+    default="ask",
+    show_default=True,
+    help=("Use a buildtree. If `always` is set, will always fail to " "build if a buildtree is not available."),
+)
+@click.option("--pull", "pull_", is_flag=True, help="Attempt to pull missing or incomplete artifacts")
+@click.argument("element", required=False, type=click.Path(readable=False))
+@click.argument("command", type=click.STRING, nargs=-1)
 @click.pass_obj
 def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, command):
     """Run a command in the target element's sandbox environment
@@ -616,8 +661,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
             if not element:
                 raise AppError('Missing argument "ELEMENT".')
 
-        elements = app.stream.load_selection((element,), selection=selection,
-                                             use_artifact_config=True)
+        elements = app.stream.load_selection((element,), selection=selection, use_artifact_config=True)
 
         # last one will be the element we want to stage, previous ones are
         # elements to try and pull
@@ -628,10 +672,7 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
         element_key = element._get_display_key()
 
         prompt = app.shell_prompt(element_name, element_key)
-        mounts = [
-            HostMount(path, host_path)
-            for host_path, path in mount
-        ]
+        mounts = [HostMount(path, host_path) for host_path, path in mount]
 
         cached = element._cached_buildtree()
         buildtree_exists = element._buildtree_exists()
@@ -640,27 +681,31 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
             if buildtree_exists or pull_:
                 use_buildtree = cli_buildtree
                 if not cached and use_buildtree == "always":
-                    click.echo("WARNING: buildtree is not cached locally, will attempt to pull from available remotes",
-                               err=True)
+                    click.echo(
+                        "WARNING: buildtree is not cached locally, will attempt to pull from available remotes",
+                        err=True,
+                    )
             else:
                 if cli_buildtree == "always":
                     # Exit early if it won't be possible to even fetch a buildtree with always option
                     raise AppError("Artifact was created without buildtree, unable to launch shell with it")
-                click.echo("WARNING: Artifact created without buildtree, shell will be loaded without it",
-                           err=True)
+                click.echo("WARNING: Artifact created without buildtree, shell will be loaded without it", err=True)
         else:
             # If the value has defaulted to ask and in non interactive mode, don't consider the buildtree, this
             # being the default behaviour of the command
             if app.interactive and cli_buildtree == "ask":
-                if cached and bool(click.confirm('Do you want to use the cached buildtree?')):
+                if cached and bool(click.confirm("Do you want to use the cached buildtree?")):
                     use_buildtree = "always"
                 elif buildtree_exists:
                     try:
-                        choice = click.prompt("Do you want to pull & use a cached buildtree?",
-                                              type=click.Choice(['try', 'always', 'never']),
-                                              err=True, show_choices=True)
+                        choice = click.prompt(
+                            "Do you want to pull & use a cached buildtree?",
+                            type=click.Choice(["try", "always", "never"]),
+                            err=True,
+                            show_choices=True,
+                        )
                     except click.Abort:
-                        click.echo('Aborting', err=True)
+                        click.echo("Aborting", err=True)
                         sys.exit(-1)
 
                     if choice != "never":
@@ -671,13 +716,17 @@ def shell(app, element, sysroot, mount, isolate, build_, cli_buildtree, pull_, c
             click.echo("WARNING: using a buildtree from a failed build.", err=True)
 
         try:
-            exitcode = app.stream.shell(element, scope, prompt,
-                                        directory=sysroot,
-                                        mounts=mounts,
-                                        isolate=isolate,
-                                        command=command,
-                                        usebuildtree=use_buildtree,
-                                        pull_dependencies=pull_dependencies)
+            exitcode = app.stream.shell(
+                element,
+                scope,
+                prompt,
+                directory=sysroot,
+                mounts=mounts,
+                isolate=isolate,
+                command=command,
+                usebuildtree=use_buildtree,
+                pull_dependencies=pull_dependencies,
+            )
         except BstError as e:
             raise AppError("Error launching shell: {}".format(e), detail=e.detail) from e
 
@@ -697,20 +746,27 @@ def source():
 #                     Source Fetch Command                       #
 ##################################################################
 @source.command(name="fetch", short_help="Fetch sources in a pipeline")
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Except certain dependencies from fetching")
-@click.option('--deps', '-d', default='plan', show_default=True,
-              type=click.Choice(['none', 'plan', 'all']),
-              help='The dependencies to fetch')
-@click.option('--track', 'track_', is_flag=True,
-              help="Track new source references before fetching")
-@click.option('--track-cross-junctions', '-J', is_flag=True,
-              help="Allow tracking to cross junction boundaries")
-@click.option('--remote', '-r', default=None,
-              help="The URL of the remote source cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--except",
+    "except_",
+    multiple=True,
+    type=click.Path(readable=False),
+    help="Except certain dependencies from fetching",
+)
+@click.option(
+    "--deps",
+    "-d",
+    default="plan",
+    show_default=True,
+    type=click.Choice(["none", "plan", "all"]),
+    help="The dependencies to fetch",
+)
+@click.option("--track", "track_", is_flag=True, help="Track new source references before fetching")
+@click.option("--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries")
+@click.option(
+    "--remote", "-r", default=None, help="The URL of the remote source cache (defaults to the first configured cache)"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, remote):
     """Fetch sources required to build the pipeline
@@ -741,36 +797,48 @@ def source_fetch(app, elements, deps, track_, except_, track_cross_junctions, re
         sys.exit(-1)
 
     if track_ and deps == PipelineSelection.PLAN:
-        click.echo("WARNING: --track specified for tracking of a build plan\n\n"
-                   "Since tracking modifies the build plan, all elements will be tracked.", err=True)
+        click.echo(
+            "WARNING: --track specified for tracking of a build plan\n\n"
+            "Since tracking modifies the build plan, all elements will be tracked.",
+            err=True,
+        )
         deps = PipelineSelection.ALL
 
     with app.initialized(session_name="Fetch"):
         if not elements:
             elements = app.project.get_default_targets()
 
-        app.stream.fetch(elements,
-                         selection=deps,
-                         except_targets=except_,
-                         track_targets=track_,
-                         track_cross_junctions=track_cross_junctions,
-                         remote=remote)
+        app.stream.fetch(
+            elements,
+            selection=deps,
+            except_targets=except_,
+            track_targets=track_,
+            track_cross_junctions=track_cross_junctions,
+            remote=remote,
+        )
 
 
 ##################################################################
 #                     Source Track Command                       #
 ##################################################################
 @source.command(name="track", short_help="Track new source references")
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Except certain dependencies from tracking")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'all']),
-              help='The dependencies to track')
-@click.option('--cross-junctions', '-J', is_flag=True,
-              help="Allow crossing junction boundaries")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--except",
+    "except_",
+    multiple=True,
+    type=click.Path(readable=False),
+    help="Except certain dependencies from tracking",
+)
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "all"]),
+    help="The dependencies to track",
+)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def source_track(app, elements, deps, except_, cross_junctions):
     """Consults the specified tracking branches for new versions available
@@ -800,41 +868,50 @@ def source_track(app, elements, deps, except_, cross_junctions):
 
         # Substitute 'none' for 'redirect' so that element redirections
         # will be done
-        if deps == 'none':
-            deps = 'redirect'
-        app.stream.track(elements,
-                         selection=deps,
-                         except_targets=except_,
-                         cross_junctions=cross_junctions)
+        if deps == "none":
+            deps = "redirect"
+        app.stream.track(elements, selection=deps, except_targets=except_, cross_junctions=cross_junctions)
 
 
 ##################################################################
 #                  Source Checkout Command                      #
 ##################################################################
-@source.command(name='checkout', short_help='Checkout sources of an element')
-@click.option('--force', '-f', is_flag=True,
-              help="Allow files to be overwritten")
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Except certain dependencies")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['build', 'none', 'run', 'all']),
-              help='The dependencies whose sources to checkout')
-@click.option('--tar', default=None, metavar='LOCATION',
-              type=click.Path(),
-              help="Create a tarball containing the sources instead "
-                   "of a file tree.")
-@click.option('--compression', default=None,
-              type=click.Choice(['gz', 'xz', 'bz2']),
-              help="The compression option of the tarball created.")
-@click.option('--include-build-scripts', 'build_scripts', is_flag=True)
-@click.option('--directory', default='source-checkout',
-              type=click.Path(file_okay=False),
-              help="The directory to checkout the sources to")
-@click.argument('element', required=False, type=click.Path(readable=False))
+@source.command(name="checkout", short_help="Checkout sources of an element")
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+    "--except", "except_", multiple=True, type=click.Path(readable=False), help="Except certain dependencies"
+)
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["build", "none", "run", "all"]),
+    help="The dependencies whose sources to checkout",
+)
+@click.option(
+    "--tar",
+    default=None,
+    metavar="LOCATION",
+    type=click.Path(),
+    help="Create a tarball containing the sources instead " "of a file tree.",
+)
+@click.option(
+    "--compression",
+    default=None,
+    type=click.Choice(["gz", "xz", "bz2"]),
+    help="The compression option of the tarball created.",
+)
+@click.option("--include-build-scripts", "build_scripts", is_flag=True)
+@click.option(
+    "--directory",
+    default="source-checkout",
+    type=click.Path(file_okay=False),
+    help="The directory to checkout the sources to",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
 @click.pass_obj
-def source_checkout(app, element, directory, force, deps, except_,
-                    tar, compression, build_scripts):
+def source_checkout(app, element, directory, force, deps, except_, tar, compression, build_scripts):
     """Checkout sources of an element to the specified location
 
     When this command is executed from a workspace directory, the default
@@ -859,14 +936,16 @@ def source_checkout(app, element, directory, force, deps, except_,
             if not element:
                 raise AppError('Missing argument "ELEMENT".')
 
-        app.stream.source_checkout(element,
-                                   location=location,
-                                   force=force,
-                                   deps=deps,
-                                   except_targets=except_,
-                                   tar=bool(tar),
-                                   compression=compression,
-                                   include_build_scripts=build_scripts)
+        app.stream.source_checkout(
+            element,
+            location=location,
+            force=force,
+            deps=deps,
+            except_targets=except_,
+            tar=bool(tar),
+            compression=compression,
+            include_build_scripts=build_scripts,
+        )
 
 
 ##################################################################
@@ -880,39 +959,42 @@ def workspace():
 ##################################################################
 #                     Workspace Open Command                     #
 ##################################################################
-@workspace.command(name='open', short_help="Open a new workspace")
-@click.option('--no-checkout', is_flag=True,
-              help="Do not checkout the source, only link to the given directory")
-@click.option('--force', '-f', is_flag=True,
-              help="The workspace will be created even if the directory in which it will be created is not empty " +
-              "or if a workspace for that element already exists")
-@click.option('--track', 'track_', is_flag=True,
-              help="Track and fetch new source references before checking out the workspace")
-@click.option('--directory', type=click.Path(file_okay=False), default=None,
-              help="Only for use when a single Element is given: Set the directory to use to create the workspace")
-@click.argument('elements', nargs=-1, type=click.Path(readable=False), required=True)
+@workspace.command(name="open", short_help="Open a new workspace")
+@click.option("--no-checkout", is_flag=True, help="Do not checkout the source, only link to the given directory")
+@click.option(
+    "--force",
+    "-f",
+    is_flag=True,
+    help="The workspace will be created even if the directory in which it will be created is not empty "
+    + "or if a workspace for that element already exists",
+)
+@click.option(
+    "--track", "track_", is_flag=True, help="Track and fetch new source references before checking out the workspace"
+)
+@click.option(
+    "--directory",
+    type=click.Path(file_okay=False),
+    default=None,
+    help="Only for use when a single Element is given: Set the directory to use to create the workspace",
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False), required=True)
 @click.pass_obj
 def workspace_open(app, no_checkout, force, track_, directory, elements):
     """Open a workspace for manual source modification"""
 
     with app.initialized():
-        app.stream.workspace_open(elements,
-                                  no_checkout=no_checkout,
-                                  track_first=track_,
-                                  force=force,
-                                  custom_dir=directory)
+        app.stream.workspace_open(
+            elements, no_checkout=no_checkout, track_first=track_, force=force, custom_dir=directory
+        )
 
 
 ##################################################################
 #                     Workspace Close Command                    #
 ##################################################################
-@workspace.command(name='close', short_help="Close workspaces")
-@click.option('--remove-dir', is_flag=True,
-              help="Remove the path that contains the closed workspace")
-@click.option('--all', '-a', 'all_', is_flag=True,
-              help="Close all open workspaces")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@workspace.command(name="close", short_help="Close workspaces")
+@click.option("--remove-dir", is_flag=True, help="Remove the path that contains the closed workspace")
+@click.option("--all", "-a", "all_", is_flag=True, help="Close all open workspaces")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def workspace_close(app, remove_dir, all_, elements):
     """Close a workspace"""
@@ -927,11 +1009,11 @@ def workspace_close(app, remove_dir, all_, elements):
             if element:
                 elements = (element,)
             else:
-                raise AppError('No elements specified')
+                raise AppError("No elements specified")
 
         # Early exit if we specified `all` and there are no workspaces
         if all_ and not app.stream.workspace_exists():
-            click.echo('No open workspaces to close', err=True)
+            click.echo("No open workspaces to close", err=True)
             sys.exit(0)
 
         if all_:
@@ -958,21 +1040,19 @@ def workspace_close(app, remove_dir, all_, elements):
     if removed_required_element:
         click.echo(
             "Removed '{}', therefore you can no longer run BuildStream "
-            "commands from the current directory.".format(element_name), err=True)
+            "commands from the current directory.".format(element_name),
+            err=True,
+        )
 
 
 ##################################################################
 #                     Workspace Reset Command                    #
 ##################################################################
-@workspace.command(name='reset', short_help="Reset a workspace to its original state")
-@click.option('--soft', is_flag=True,
-              help="Reset workspace state without affecting its contents")
-@click.option('--track', 'track_', is_flag=True,
-              help="Track and fetch the latest source before resetting")
-@click.option('--all', '-a', 'all_', is_flag=True,
-              help="Reset all open workspaces")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@workspace.command(name="reset", short_help="Reset a workspace to its original state")
+@click.option("--soft", is_flag=True, help="Reset workspace state without affecting its contents")
+@click.option("--track", "track_", is_flag=True, help="Track and fetch the latest source before resetting")
+@click.option("--all", "-a", "all_", is_flag=True, help="Reset all open workspaces")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def workspace_reset(app, soft, track_, all_, elements):
     """Reset a workspace to its original state"""
@@ -985,7 +1065,7 @@ def workspace_reset(app, soft, track_, all_, elements):
             if element:
                 elements = (element,)
             else:
-                raise AppError('No elements specified to reset')
+                raise AppError("No elements specified to reset")
 
         if all_ and not app.stream.workspace_exists():
             raise AppError("No open workspaces to reset")
@@ -999,7 +1079,7 @@ def workspace_reset(app, soft, track_, all_, elements):
 ##################################################################
 #                     Workspace List Command                     #
 ##################################################################
-@workspace.command(name='list', short_help="List open workspaces")
+@workspace.command(name="list", short_help="List open workspaces")
 @click.pass_obj
 def workspace_list(app):
     """List open workspaces"""
@@ -1044,11 +1124,16 @@ def artifact():
 #############################################################
 #                    Artifact show Command                  #
 #############################################################
-@artifact.command(name='show', short_help="Show the cached state of artifacts")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['build', 'run', 'all', 'none']),
-              help='The dependencies we also want to show')
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="show", short_help="Show the cached state of artifacts")
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["build", "run", "all", "none"]),
+    help="The dependencies we also want to show",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
 @click.pass_obj
 def artifact_show(app, deps, artifacts):
     """show the cached state of artifacts"""
@@ -1061,31 +1146,38 @@ def artifact_show(app, deps, artifacts):
 #####################################################################
 #                     Artifact Checkout Command                     #
 #####################################################################
-@artifact.command(name='checkout', short_help="Checkout contents of an artifact")
-@click.option('--force', '-f', is_flag=True,
-              help="Allow files to be overwritten")
-@click.option('--deps', '-d', default='run', show_default=True,
-              type=click.Choice(['run', 'build', 'none', 'all']),
-              help='The dependencies to checkout')
-@click.option('--integrate/--no-integrate', default=None, is_flag=True,
-              help="Whether to run integration commands")
-@click.option('--hardlinks', is_flag=True,
-              help="Checkout hardlinks instead of copying if possible")
-@click.option('--tar', default=None, metavar='LOCATION',
-              type=click.Path(),
-              help="Create a tarball from the artifact contents instead "
-                   "of a file tree. If LOCATION is '-', the tarball "
-                   "will be dumped to the standard output.")
-@click.option('--compression', default=None,
-              type=click.Choice(['gz', 'xz', 'bz2']),
-              help="The compression option of the tarball created.")
-@click.option('--pull', 'pull_', is_flag=True,
-              help="Pull the artifact if it's missing or incomplete.")
-@click.option('--directory', default=None,
-              type=click.Path(file_okay=False),
-              help="The directory to checkout the artifact to")
-@click.argument('target', required=False,
-                type=click.Path(readable=False))
+@artifact.command(name="checkout", short_help="Checkout contents of an artifact")
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+    "--deps",
+    "-d",
+    default="run",
+    show_default=True,
+    type=click.Choice(["run", "build", "none", "all"]),
+    help="The dependencies to checkout",
+)
+@click.option("--integrate/--no-integrate", default=None, is_flag=True, help="Whether to run integration commands")
+@click.option("--hardlinks", is_flag=True, help="Checkout hardlinks instead of copying if possible")
+@click.option(
+    "--tar",
+    default=None,
+    metavar="LOCATION",
+    type=click.Path(),
+    help="Create a tarball from the artifact contents instead "
+    "of a file tree. If LOCATION is '-', the tarball "
+    "will be dumped to the standard output.",
+)
+@click.option(
+    "--compression",
+    default=None,
+    type=click.Choice(["gz", "xz", "bz2"]),
+    help="The compression option of the tarball created.",
+)
+@click.option("--pull", "pull_", is_flag=True, help="Pull the artifact if it's missing or incomplete.")
+@click.option(
+    "--directory", default=None, type=click.Path(file_okay=False), help="The directory to checkout the artifact to"
+)
+@click.argument("target", required=False, type=click.Path(readable=False))
 @click.pass_obj
 def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target):
     """Checkout contents of an artifact
@@ -1110,7 +1202,7 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
                 location = os.path.abspath(os.path.join(os.getcwd(), target))
             else:
                 location = directory
-            if location[-4:] == '.bst':
+            if location[-4:] == ".bst":
                 location = location[:-4]
             tar = False
     else:
@@ -1120,9 +1212,12 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
         except UtilError as e:
             click.echo("ERROR: Invalid file extension given with '--tar': {}".format(e), err=True)
             sys.exit(-1)
-        if compression and inferred_compression != '' and inferred_compression != compression:
-            click.echo("WARNING: File extension and compression differ."
-                       "File extension has been overridden by --compression", err=True)
+        if compression and inferred_compression != "" and inferred_compression != compression:
+            click.echo(
+                "WARNING: File extension and compression differ."
+                "File extension has been overridden by --compression",
+                err=True,
+            )
         if not compression:
             compression = inferred_compression
 
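The --tar/--compression interplay above infers a compression format from the
tarball's file extension, warns when the inference conflicts with an explicit
--compression (in which case the flag wins), and otherwise adopts the inferred
value. A minimal sketch of that inference, using a hypothetical helper rather
than BuildStream's internal utility:

    import os

    _EXTENSIONS = {"": "", ".tar": "", ".gz": "gz", ".xz": "xz", ".bz2": "bz2"}

    def infer_compression(location):
        # ".tar" or no suffix at all means an uncompressed tarball
        ext = os.path.splitext(location)[1]
        if ext not in _EXTENSIONS:
            raise ValueError("Invalid file extension: '{}'".format(ext))
        return _EXTENSIONS[ext]

    assert infer_compression("out.tar.xz") == "xz"
    assert infer_compression("out.tar") == ""

On the command line this corresponds to, for example, "bst artifact checkout
--tar out.tar.xz target.bst", where the xz compression is inferred without
passing --compression.
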
@@ -1132,28 +1227,35 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
             if not target:
                 raise AppError('Missing argument "ELEMENT".')
 
-        app.stream.checkout(target,
-                            location=location,
-                            force=force,
-                            selection=deps,
-                            integrate=True if integrate is None else integrate,
-                            hardlinks=hardlinks,
-                            pull=pull_,
-                            compression=compression,
-                            tar=bool(tar))
+        app.stream.checkout(
+            target,
+            location=location,
+            force=force,
+            selection=deps,
+            integrate=True if integrate is None else integrate,
+            hardlinks=hardlinks,
+            pull=pull_,
+            compression=compression,
+            tar=bool(tar),
+        )
 
 
 ################################################################
 #                     Artifact Pull Command                    #
 ################################################################
 @artifact.command(name="pull", short_help="Pull a built artifact")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'all']),
-              help='The dependency artifacts to pull')
-@click.option('--remote', '-r', default=None,
-              help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('artifacts', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "all"]),
+    help="The dependency artifacts to pull",
+)
+@click.option(
+    "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def artifact_pull(app, artifacts, deps, remote):
     """Pull a built artifact from the configured remote artifact cache.
@@ -1184,21 +1286,25 @@ def artifact_pull(app, artifacts, deps, remote):
             # Junction elements cannot be pulled, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.pull(artifacts, selection=deps, remote=remote,
-                        ignore_junction_targets=ignore_junction_targets)
+        app.stream.pull(artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets)
 
 
 ##################################################################
 #                     Artifact Push Command                      #
 ##################################################################
 @artifact.command(name="push", short_help="Push a built artifact")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'all']),
-              help='The dependencies to push')
-@click.option('--remote', '-r', default=None,
-              help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('artifacts', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "all"]),
+    help="The dependencies to push",
+)
+@click.option(
+    "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def artifact_push(app, artifacts, deps, remote):
     """Push a built artifact to a remote artifact cache.
@@ -1231,18 +1337,19 @@ def artifact_push(app, artifacts, deps, remote):
             # Junction elements cannot be pushed, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.push(artifacts, selection=deps, remote=remote,
-                        ignore_junction_targets=ignore_junction_targets)
+        app.stream.push(artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets)
 
 
 ################################################################
 #                     Artifact Log Command                     #
 ################################################################
-@artifact.command(name='log', short_help="Show logs of artifacts")
-@click.option('--out',
-              type=click.Path(file_okay=True, writable=True),
-              help="Output logs to individual files in the specified path. If absent, logs are written to stdout.")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="log", short_help="Show logs of artifacts")
+@click.option(
+    "--out",
+    type=click.Path(file_okay=True, writable=True),
+    help="Output logs to individual files in the specified path. If absent, logs are written to stdout.",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
 @click.pass_obj
 def artifact_log(app, artifacts, out):
     """Show build logs of artifacts"""
@@ -1252,7 +1359,7 @@ def artifact_log(app, artifacts, out):
         if not out:
             try:
                 for log in list(artifact_logs.values()):
-                    with open(log[0], 'r') as f:
+                    with open(log[0], "r") as f:
                         data = f.read()
                     click.echo_via_pager(data)
             except (OSError, FileNotFoundError):
@@ -1274,7 +1381,7 @@ def artifact_log(app, artifacts, out):
                         shutil.copy(log, dest)
                     # make a dir and write in log files
                 else:
-                    log_name = os.path.splitext(name)[0] + '.log'
+                    log_name = os.path.splitext(name)[0] + ".log"
                     dest = os.path.join(out, log_name)
                     shutil.copy(log_files[0], dest)
                     # write a log file
@@ -1283,10 +1390,11 @@ def artifact_log(app, artifacts, out):
 ################################################################
 #                Artifact List-Contents Command                #
 ################################################################
-@artifact.command(name='list-contents', short_help="List the contents of an artifact")
-@click.option('--long', '-l', 'long_', is_flag=True,
-              help="Provide more information about the contents of the artifact.")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="list-contents", short_help="List the contents of an artifact")
+@click.option(
+    "--long", "-l", "long_", is_flag=True, help="Provide more information about the contents of the artifact."
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
 @click.pass_obj
 def artifact_list_contents(app, artifacts, long_):
     """List the contents of an artifact.
@@ -1308,11 +1416,16 @@ def artifact_list_contents(app, artifacts, long_):
 ###################################################################
 #                     Artifact Delete Command                     #
 ###################################################################
-@artifact.command(name='delete', short_help="Remove artifacts from the local cache")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'run', 'build', 'all']),
-              help="The dependencies to delete")
-@click.argument('artifacts', type=click.Path(), nargs=-1)
+@artifact.command(name="delete", short_help="Remove artifacts from the local cache")
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "run", "build", "all"]),
+    help="The dependencies to delete",
+)
+@click.argument("artifacts", type=click.Path(), nargs=-1)
 @click.pass_obj
 def artifact_delete(app, artifacts, deps):
     """Remove artifacts from the local cache"""
@@ -1333,18 +1446,24 @@ def artifact_delete(app, artifacts, deps):
 #                          Fetch Command                         #
 ##################################################################
 @cli.command(short_help="COMMAND OBSOLETE - Fetch sources in a pipeline", hidden=True)
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Except certain dependencies from fetching")
-@click.option('--deps', '-d', default='plan', show_default=True,
-              type=click.Choice(['none', 'plan', 'all']),
-              help='The dependencies to fetch')
-@click.option('--track', 'track_', is_flag=True,
-              help="Track new source references before fetching")
-@click.option('--track-cross-junctions', '-J', is_flag=True,
-              help="Allow tracking to cross junction boundaries")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--except",
+    "except_",
+    multiple=True,
+    type=click.Path(readable=False),
+    help="Except certain dependencies from fetching",
+)
+@click.option(
+    "--deps",
+    "-d",
+    default="plan",
+    show_default=True,
+    type=click.Choice(["none", "plan", "all"]),
+    help="The dependencies to fetch",
+)
+@click.option("--track", "track_", is_flag=True, help="Track new source references before fetching")
+@click.option("--track-cross-junctions", "-J", is_flag=True, help="Allow tracking to cross junction boundaries")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def fetch(app, elements, deps, track_, except_, track_cross_junctions):
     click.echo("This command is now obsolete. Use `bst source fetch` instead.", err=True)
@@ -1355,16 +1474,23 @@ def fetch(app, elements, deps, track_, except_, track_cross_junctions):
 #                          Track Command                         #
 ##################################################################
 @cli.command(short_help="COMMAND OBSOLETE - Track new source references", hidden=True)
-@click.option('--except', 'except_', multiple=True,
-              type=click.Path(readable=False),
-              help="Except certain dependencies from tracking")
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'all']),
-              help='The dependencies to track')
-@click.option('--cross-junctions', '-J', is_flag=True,
-              help="Allow crossing junction boundaries")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--except",
+    "except_",
+    multiple=True,
+    type=click.Path(readable=False),
+    help="Except certain dependencies from tracking",
+)
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "all"]),
+    help="The dependencies to track",
+)
+@click.option("--cross-junctions", "-J", is_flag=True, help="Allow crossing junction boundaries")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def track(app, elements, deps, except_, cross_junctions):
     click.echo("This command is now obsolete. Use `bst source track` instead.", err=True)
@@ -1375,26 +1501,33 @@ def track(app, elements, deps, except_, cross_junctions):
 #                        Checkout Command                        #
 ##################################################################
 @cli.command(short_help="COMMAND OBSOLETE - Checkout a built artifact", hidden=True)
-@click.option('--force', '-f', is_flag=True,
-              help="Allow files to be overwritten")
-@click.option('--deps', '-d', default='run', show_default=True,
-              type=click.Choice(['run', 'build', 'none']),
-              help='The dependencies to checkout')
-@click.option('--integrate/--no-integrate', default=True,
-              help="Run integration commands (default is to run commands)")
-@click.option('--hardlinks', is_flag=True,
-              help="Checkout hardlinks instead of copies (handle with care)")
-@click.option('--tar', is_flag=True,
-              help="Create a tarball from the artifact contents instead "
-                   "of a file tree. If LOCATION is '-', the tarball "
-                   "will be dumped to the standard output.")
-@click.argument('element', required=False,
-                type=click.Path(readable=False))
-@click.argument('location', type=click.Path(), required=False)
+@click.option("--force", "-f", is_flag=True, help="Allow files to be overwritten")
+@click.option(
+    "--deps",
+    "-d",
+    default="run",
+    show_default=True,
+    type=click.Choice(["run", "build", "none"]),
+    help="The dependencies to checkout",
+)
+@click.option("--integrate/--no-integrate", default=True, help="Run integration commands (default is to run commands)")
+@click.option("--hardlinks", is_flag=True, help="Checkout hardlinks instead of copies (handle with care)")
+@click.option(
+    "--tar",
+    is_flag=True,
+    help="Create a tarball from the artifact contents instead "
+    "of a file tree. If LOCATION is '-', the tarball "
+    "will be dumped to the standard output.",
+)
+@click.argument("element", required=False, type=click.Path(readable=False))
+@click.argument("location", type=click.Path(), required=False)
 @click.pass_obj
 def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
-    click.echo("This command is now obsolete. Use `bst artifact checkout` instead " +
-               "and use the --directory option to specify LOCATION", err=True)
+    click.echo(
+        "This command is now obsolete. Use `bst artifact checkout` instead "
+        + "and use the --directory option to specify LOCATION",
+        err=True,
+    )
     sys.exit(1)
 
 
@@ -1402,13 +1535,16 @@ def checkout(app, element, location, force, deps, integrate, hardlinks, tar):
 #                          Pull Command                        #
 ################################################################
 @cli.command(short_help="COMMAND OBSOLETE - Pull a built artifact", hidden=True)
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'all']),
-              help='The dependency artifacts to pull')
-@click.option('--remote', '-r',
-              help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "all"]),
+    help="The dependency artifacts to pull",
+)
+@click.option("--remote", "-r", help="The URL of the remote cache (defaults to the first configured cache)")
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def pull(app, elements, deps, remote):
     click.echo("This command is now obsolete. Use `bst artifact pull` instead.", err=True)
@@ -1419,13 +1555,18 @@ def pull(app, elements, deps, remote):
 #                           Push Command                         #
 ##################################################################
 @cli.command(short_help="COMMAND OBSOLETE - Push a built artifact", hidden=True)
-@click.option('--deps', '-d', default='none', show_default=True,
-              type=click.Choice(['none', 'all']),
-              help='The dependencies to push')
-@click.option('--remote', '-r', default=None,
-              help="The URL of the remote cache (defaults to the first configured cache)")
-@click.argument('elements', nargs=-1,
-                type=click.Path(readable=False))
+@click.option(
+    "--deps",
+    "-d",
+    default="none",
+    show_default=True,
+    type=click.Choice(["none", "all"]),
+    help="The dependencies to push",
+)
+@click.option(
+    "--remote", "-r", default=None, help="The URL of the remote cache (defaults to the first configured cache)"
+)
+@click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
 def push(app, elements, deps, remote):
     click.echo("This command is now obsolete. Use `bst artifact push` instead.", err=True)
diff --git a/src/buildstream/_frontend/complete.py b/src/buildstream/_frontend/complete.py
index 06067f6..45e857e 100644
--- a/src/buildstream/_frontend/complete.py
+++ b/src/buildstream/_frontend/complete.py
@@ -39,9 +39,9 @@ import click
 from click.core import MultiCommand, Option, Argument
 from click.parser import split_arg_string
 
-WORDBREAK = '='
+WORDBREAK = "="
 
-COMPLETION_SCRIPT = '''
+COMPLETION_SCRIPT = """
 %(complete_func)s() {
     local IFS=$'\n'
     COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
@@ -51,7 +51,7 @@ COMPLETION_SCRIPT = '''
 }
 
 complete -F %(complete_func)s -o nospace %(script_names)s
-'''
+"""
 
 
 # An exception for our custom completion handler to
@@ -62,7 +62,7 @@ class CompleteUnhandled(Exception):
     pass
 
 
-def complete_path(path_type, incomplete, base_directory='.'):
+def complete_path(path_type, incomplete, base_directory="."):
     """Helper method for implementing the completions() method
     for File and Path parameter types.
     """
@@ -71,7 +71,7 @@ def complete_path(path_type, incomplete, base_directory='.'):
     # specified in `incomplete` minus the last path component,
     # otherwise list files starting from the current working directory.
     entries = []
-    base_path = ''
+    base_path = ""
 
     # This is getting a bit messy
     listed_base_directory = False
@@ -128,11 +128,11 @@ def complete_path(path_type, incomplete, base_directory='.'):
 
     return [
         # Return an appropriate path for each entry
-        fix_path(e) for e in sorted(entries)
-
+        fix_path(e)
+        for e in sorted(entries)
         # Filter out non directory elements when searching for a directory,
         # the opposite is fine, however.
-        if not (path_type == 'Directory' and not entry_is_dir(e))
+        if not (path_type == "Directory" and not entry_is_dir(e))
     ]
 
 
@@ -183,7 +183,7 @@ def start_of_option(param_str):
     :param param_str: param_str to check
     :return: whether or not this is the start of an option declaration (i.e. starts "-" or "--")
     """
-    return param_str and param_str[:1] == '-'
+    return param_str and param_str[:1] == "-"
 
 
 def is_incomplete_option(all_args, cmd_param):
@@ -218,8 +218,11 @@ def is_incomplete_argument(current_params, cmd_param):
         return True
     if cmd_param.nargs == -1:
         return True
-    if isinstance(current_param_values, collections.abc.Iterable) \
-            and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
+    if (
+        isinstance(current_param_values, collections.abc.Iterable)
+        and cmd_param.nargs > 1
+        and len(current_param_values) < cmd_param.nargs
+    ):
         return True
     return False
 
@@ -237,10 +240,7 @@ def get_user_autocompletions(args, incomplete, cmd, cmd_param, override):
 
     # Use the type specific default completions unless it was overridden
     try:
-        return override(cmd=cmd,
-                        cmd_param=cmd_param,
-                        args=args,
-                        incomplete=incomplete)
+        return override(cmd=cmd, cmd_param=cmd_param, args=args, incomplete=incomplete)
     except CompleteUnhandled:
         return get_param_type_completion(cmd_param.type, incomplete) or []
 
@@ -269,7 +269,7 @@ def get_choices(cli, prog_name, args, incomplete, override):
         all_args.append(partition_incomplete[0])
         incomplete = partition_incomplete[2]
     elif incomplete == WORDBREAK:
-        incomplete = ''
+        incomplete = ""
 
     choices = []
     found_param = False
@@ -277,8 +277,13 @@ def get_choices(cli, prog_name, args, incomplete, override):
         # completions for options
         for param in ctx.command.params:
             if isinstance(param, Option):
-                choices.extend([param_opt + " " for param_opt in param.opts + param.secondary_opts
-                                if param_opt not in all_args or param.multiple])
+                choices.extend(
+                    [
+                        param_opt + " "
+                        for param_opt in param.opts + param.secondary_opts
+                        if param_opt not in all_args or param.multiple
+                    ]
+                )
         found_param = True
     if not found_param:
         # completion for option values by choices
@@ -297,14 +302,22 @@ def get_choices(cli, prog_name, args, incomplete, override):
 
     if not found_param and isinstance(ctx.command, MultiCommand):
         # completion for any subcommands
-        choices.extend([cmd + " " for cmd in ctx.command.list_commands(ctx)
-                        if not ctx.command.get_command(ctx, cmd).hidden])
-
-    if not start_of_option(incomplete) and ctx.parent is not None \
-       and isinstance(ctx.parent.command, MultiCommand) and ctx.parent.command.chain:
+        choices.extend(
+            [cmd + " " for cmd in ctx.command.list_commands(ctx) if not ctx.command.get_command(ctx, cmd).hidden]
+        )
+
+    if (
+        not start_of_option(incomplete)
+        and ctx.parent is not None
+        and isinstance(ctx.parent.command, MultiCommand)
+        and ctx.parent.command.chain
+    ):
         # completion for chained commands
-        visible_commands = [cmd for cmd in ctx.parent.command.list_commands(ctx.parent)
-                            if not ctx.parent.command.get_command(ctx.parent, cmd).hidden]
+        visible_commands = [
+            cmd
+            for cmd in ctx.parent.command.list_commands(ctx.parent)
+            if not ctx.parent.command.get_command(ctx.parent, cmd).hidden
+        ]
         remaining_commands = set(visible_commands) - set(ctx.parent.protected_args)
         choices.extend([cmd + " " for cmd in remaining_commands])
 
@@ -314,13 +327,13 @@ def get_choices(cli, prog_name, args, incomplete, override):
 
 
 def do_complete(cli, prog_name, override):
-    cwords = split_arg_string(os.environ['COMP_WORDS'])
-    cword = int(os.environ['COMP_CWORD'])
+    cwords = split_arg_string(os.environ["COMP_WORDS"])
+    cword = int(os.environ["COMP_CWORD"])
     args = cwords[1:cword]
     try:
         incomplete = cwords[cword]
     except IndexError:
-        incomplete = ''
+        incomplete = ""
 
     for item in get_choices(cli, prog_name, args, incomplete, override):
         click.echo(item)
@@ -331,7 +344,7 @@ def do_complete(cli, prog_name, override):
 def main_bashcomplete(cmd, prog_name, override):
     """Internal handler for the bash completion support."""
 
-    if '_BST_COMPLETION' in os.environ:
+    if "_BST_COMPLETION" in os.environ:
         do_complete(cmd, prog_name, override)
         return True
 
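The do_complete() handler above is driven by the protocol bash uses for
"complete -F" functions: COMP_WORDS holds the words typed on the command line,
COMP_CWORD holds the index of the word being completed, and the presence of
the _BST_COMPLETION variable switches bst into completion mode. A rough way to
exercise that handshake outside of bash (the expected output is illustrative
only):

    import os
    import subprocess

    env = dict(os.environ)
    env["_BST_COMPLETION"] = "complete"
    env["COMP_WORDS"] = "bst workspace "
    env["COMP_CWORD"] = "2"

    # bst prints one completion choice per line and exits
    result = subprocess.run(["bst"], env=env, capture_output=True, text=True)
    print(result.stdout)  # e.g. "close \nlist \nopen \nreset \n"
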
diff --git a/src/buildstream/_frontend/linuxapp.py b/src/buildstream/_frontend/linuxapp.py
index 0444dc7..987b023 100644
--- a/src/buildstream/_frontend/linuxapp.py
+++ b/src/buildstream/_frontend/linuxapp.py
@@ -28,9 +28,9 @@ from .app import App
 #
 def _osc_777_supported():
 
-    term = os.environ.get('TERM')
+    term = os.environ.get("TERM")
 
-    if term and (term.startswith('xterm') or term.startswith('vte')):
+    if term and (term.startswith("xterm") or term.startswith("vte")):
 
         # Since vte version 4600, upstream silently ignores
         # the OSC 777 without printing garbage to the terminal.
@@ -39,7 +39,7 @@ def _osc_777_supported():
         # will trigger a desktop notification and bring attention
         # to the terminal.
         #
-        vte_version = os.environ.get('VTE_VERSION')
+        vte_version = os.environ.get("VTE_VERSION")
         try:
             vte_version_int = int(vte_version)
         except (ValueError, TypeError):
@@ -54,7 +54,6 @@ def _osc_777_supported():
 # A linux specific App implementation
 #
 class LinuxApp(App):
-
     def notify(self, title, text):
 
         # Currently we only try this notification method
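
The check above gates desktop notifications on terminals known to understand
the OSC 777 escape. For reference, the sequence a vte-based terminal expects
looks roughly like the sketch below; the exact framing is an assumption based
on the vte convention, not something shown in this diff:

    import sys

    def notify_osc777(title, text):
        # OSC 777 "notify" escape, terminated with BEL (assumed framing)
        sys.stdout.write("\033]777;notify;{};{}\a".format(title, text))
        sys.stdout.flush()

    notify_osc777("BuildStream", "Build of base.bst complete")
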
diff --git a/src/buildstream/_frontend/profile.py b/src/buildstream/_frontend/profile.py
index dda0f7f..f49be5b 100644
--- a/src/buildstream/_frontend/profile.py
+++ b/src/buildstream/_frontend/profile.py
@@ -28,7 +28,7 @@ import click
 # Kwargs:
 #    The same keyword arguments which can be used with click.style()
 #
-class Profile():
+class Profile:
     def __init__(self, **kwargs):
         self._kwargs = dict(kwargs)
 
@@ -64,7 +64,6 @@ class Profile():
     #    arguments
     #
     def fmt_subst(self, text, varname, value, **kwargs):
-
         def subst_callback(match):
             # Extract and format the "{(varname)...}" portion of the match
             inner_token = match.group(1)
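
A Profile simply carries a fixed set of click.style() keyword arguments, and
fmt_subst() replaces "%{varname}" tokens inside a format string with a styled
value. A minimal usage sketch, assuming fmt() forwards its text and stored
kwargs to click.style() (consistent with how the widgets elsewhere use it):

    content_profile = Profile(fg="yellow")

    # Style a plain string with the profile's attributes
    heading = content_profile.fmt("cache key")

    # Substitute a "%{name}" token, applying extra style arguments
    line = content_profile.fmt_subst("Element: %{name}", "name", "base.bst", bold=True)
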
diff --git a/src/buildstream/_frontend/status.py b/src/buildstream/_frontend/status.py
index 8da7df0..76a8a14 100644
--- a/src/buildstream/_frontend/status.py
+++ b/src/buildstream/_frontend/status.py
@@ -44,19 +44,14 @@ from .widget import TimeCode
 #    stream (Stream): The Stream
 #    colors (bool): Whether to print the ANSI color codes in the output
 #
-class Status():
+class Status:
 
     # Table of the terminal capabilities we require and use
-    _TERM_CAPABILITIES = {
-        'move_up': 'cuu1',
-        'move_x': 'hpa',
-        'clear_eol': 'el'
-    }
+    _TERM_CAPABILITIES = {"move_up": "cuu1", "move_x": "hpa", "clear_eol": "el"}
 
-    def __init__(self, context, state,
-                 content_profile, format_profile,
-                 success_profile, error_profile,
-                 stream, colors=False):
+    def __init__(
+        self, context, state, content_profile, format_profile, success_profile, error_profile, stream, colors=False
+    ):
 
         self._context = context
         self._state = state
@@ -69,10 +64,9 @@ class Status():
         self._last_lines = 0  # Number of status lines we last printed to console
         self._spacing = 1
         self._colors = colors
-        self._header = _StatusHeader(context, state,
-                                     content_profile, format_profile,
-                                     success_profile, error_profile,
-                                     stream)
+        self._header = _StatusHeader(
+            context, state, content_profile, format_profile, success_profile, error_profile, stream
+        )
 
         self._term_width, _ = click.get_terminal_size()
         self._alloc_lines = 0
@@ -133,7 +127,7 @@ class Status():
         # feeds for the amount of lines we intend to print first, and
         # move cursor position back to the first line
         for _ in range(self._alloc_lines + self._header.lines):
-            click.echo('', err=True)
+            click.echo("", err=True)
         for _ in range(self._alloc_lines + self._header.lines):
             self._move_up()
 
@@ -145,14 +139,14 @@ class Status():
         # alignment of each column
         n_columns = len(self._alloc_columns)
         for line in self._job_lines(n_columns):
-            text = ''
+            text = ""
             for job in line:
                 column = line.index(job)
                 text += job.render(self._alloc_columns[column] - job.size, elapsed)
 
                 # Add spacing between columns
                 if column < (n_columns - 1):
-                    text += ' ' * self._spacing
+                    text += " " * self._spacing
 
             # Print the line
             click.echo(text, color=self._colors, err=True)
@@ -198,7 +192,7 @@ class Status():
         # Initialize the terminal; curses might decide it doesn't
         # support this terminal
         try:
-            curses.setupterm(os.environ.get('TERM', 'dumb'))
+            curses.setupterm(os.environ.get("TERM", "dumb"))
         except curses.error:
             return None
 
@@ -223,7 +217,7 @@ class Status():
             # as well, and should provide better compatibility with most
             # terminals.
             #
-            term_caps[capname] = code.decode('latin1')
+            term_caps[capname] = code.decode("latin1")
 
         return term_caps
 
@@ -238,19 +232,19 @@ class Status():
 
         # Explicitly move to beginning of line, fixes things up
         # when there was a ^C or ^Z printed to the terminal.
-        move_x = curses.tparm(self._term_caps['move_x'].encode('latin1'), 0)
-        move_x = move_x.decode('latin1')
+        move_x = curses.tparm(self._term_caps["move_x"].encode("latin1"), 0)
+        move_x = move_x.decode("latin1")
 
-        move_up = curses.tparm(self._term_caps['move_up'].encode('latin1'))
-        move_up = move_up.decode('latin1')
+        move_up = curses.tparm(self._term_caps["move_up"].encode("latin1"))
+        move_up = move_up.decode("latin1")
 
         click.echo(move_x + move_up, nl=False, err=True)
 
     def _clear_line(self):
         assert self._term_caps is not None
 
-        clear_eol = curses.tparm(self._term_caps['clear_eol'].encode('latin1'))
-        clear_eol = clear_eol.decode('latin1')
+        clear_eol = curses.tparm(self._term_caps["clear_eol"].encode("latin1"))
+        clear_eol = clear_eol.decode("latin1")
         click.echo(clear_eol, nl=False, err=True)
 
     def _allocate(self):
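
The helpers above drive the interactive status area with three terminfo
capabilities ("cuu1", "hpa" and "el"), queried once at startup and then
instantiated with curses.tparm(). A standalone sketch of the same pattern; it
needs a real terminal whose TERM entry provides these capabilities:

    import curses
    import os
    import sys

    curses.setupterm(os.environ.get("TERM", "dumb"))

    # tigetstr() returns the capability as bytes, or None if it is missing
    move_x = curses.tigetstr("hpa")    # set horizontal cursor position
    clear_eol = curses.tigetstr("el")  # clear to end of line

    if move_x and clear_eol:
        # tparm() substitutes parameters, here column 0
        sys.stdout.write(curses.tparm(move_x, 0).decode("latin1"))
        sys.stdout.write(clear_eol.decode("latin1"))
        sys.stdout.flush()
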
@@ -279,7 +273,7 @@ class Status():
     def _job_lines(self, columns):
         jobs_list = list(self._jobs.values())
         for i in range(0, len(self._jobs), columns):
-            yield jobs_list[i:i + columns]
+            yield jobs_list[i : i + columns]
 
     # Returns an array of integers representing the maximum
     # length in characters for each column, given the current
@@ -309,9 +303,7 @@ class Status():
     def _add_job(self, action_name, full_name):
         task = self._state.tasks[(action_name, full_name)]
         elapsed = task.elapsed_offset
-        job = _StatusJob(self._context, action_name, full_name,
-                         self._content_profile, self._format_profile,
-                         elapsed)
+        job = _StatusJob(self._context, action_name, full_name, self._content_profile, self._format_profile, elapsed)
         self._jobs[(action_name, full_name)] = job
         self._need_alloc = True
 
@@ -340,12 +332,8 @@ class Status():
 #    error_profile (Profile): Formatting profile for error text
 #    stream (Stream): The Stream
 #
-class _StatusHeader():
-
-    def __init__(self, context, state,
-                 content_profile, format_profile,
-                 success_profile, error_profile,
-                 stream):
+class _StatusHeader:
+    def __init__(self, context, state, content_profile, format_profile, success_profile, error_profile, stream):
 
         #
         # Public members
@@ -377,19 +365,22 @@ class _StatusHeader():
         total = str(len(self._stream.total_elements))
 
         size = 0
-        text = ''
+        text = ""
         size += len(total) + len(session) + 4  # Size for (N/N) with a leading space
         size += 8  # Size of time code
         size += len(project.name) + 1
         text += self._time_code.render_time(elapsed)
-        text += ' ' + self._content_profile.fmt(project.name)
-        text += ' ' + self._format_profile.fmt('(') + \
-                self._content_profile.fmt(session) + \
-                self._format_profile.fmt('/') + \
-                self._content_profile.fmt(total) + \
-                self._format_profile.fmt(')')
-
-        line1 = self._centered(text, size, line_length, '=')
+        text += " " + self._content_profile.fmt(project.name)
+        text += (
+            " "
+            + self._format_profile.fmt("(")
+            + self._content_profile.fmt(session)
+            + self._format_profile.fmt("/")
+            + self._content_profile.fmt(total)
+            + self._format_profile.fmt(")")
+        )
+
+        line1 = self._centered(text, size, line_length, "=")
 
         #
         # Line 2: Dynamic list of queue status reports
@@ -397,7 +388,7 @@ class _StatusHeader():
         #  (Sources Fetched:0 117 0)→ (Built:4 0 0)
         #
         size = 0
-        text = ''
+        text = ""
 
         # Format and calculate size for each queue progress
         for index, task_group in enumerate(self._state.task_groups.values()):
@@ -405,13 +396,13 @@ class _StatusHeader():
             # Add spacing
             if index > 0:
                 size += 2
-                text += self._format_profile.fmt('→ ')
+                text += self._format_profile.fmt("→ ")
 
             group_text, group_size = self._render_task_group(task_group)
             size += group_size
             text += group_text
 
-        line2 = self._centered(text, size, line_length, ' ')
+        line2 = self._centered(text, size, line_length, " ")
 
         #
         # Line 3: Cache usage percentage report
@@ -425,7 +416,7 @@ class _StatusHeader():
         if usage.used_size is None:
             # Cache usage is unknown
             size = 0
-            text = ''
+            text = ""
         else:
             size = 21
             size += len(usage_string)
@@ -436,15 +427,17 @@ class _StatusHeader():
             else:
                 formatted_usage = self._success_profile.fmt(usage_string)
 
-            text = self._format_profile.fmt("~~~~~~ ") + \
-                self._content_profile.fmt('cache') + \
-                self._format_profile.fmt(': ') + \
-                formatted_usage + \
-                self._format_profile.fmt(' ~~~~~~')
+            text = (
+                self._format_profile.fmt("~~~~~~ ")
+                + self._content_profile.fmt("cache")
+                + self._format_profile.fmt(": ")
+                + formatted_usage
+                + self._format_profile.fmt(" ~~~~~~")
+            )
 
-        line3 = self._centered(text, size, line_length, ' ')
+        line3 = self._centered(text, size, line_length, " ")
 
-        return line1 + '\n' + line2 + '\n' + line3
+        return line1 + "\n" + line2 + "\n" + line3
 
     ###################################################
     #                 Private Methods                 #
@@ -457,13 +450,17 @@ class _StatusHeader():
         size = 5  # Space for the formatting '(', ':', ' ', ' ' and ')'
         size += len(group.complete_name)
         size += len(processed) + len(skipped) + len(failed)
-        text = self._format_profile.fmt("(") + \
-            self._content_profile.fmt(group.complete_name) + \
-            self._format_profile.fmt(":") + \
-            self._success_profile.fmt(processed) + ' ' + \
-            self._content_profile.fmt(skipped) + ' ' + \
-            self._error_profile.fmt(failed) + \
-            self._format_profile.fmt(")")
+        text = (
+            self._format_profile.fmt("(")
+            + self._content_profile.fmt(group.complete_name)
+            + self._format_profile.fmt(":")
+            + self._success_profile.fmt(processed)
+            + " "
+            + self._content_profile.fmt(skipped)
+            + " "
+            + self._error_profile.fmt(failed)
+            + self._format_profile.fmt(")")
+        )
 
         return (text, size)
 
@@ -471,9 +468,9 @@ class _StatusHeader():
         remaining = line_length - size
         remaining -= 2
 
-        final_text = self._format_profile.fmt(fill * (remaining // 2)) + ' '
+        final_text = self._format_profile.fmt(fill * (remaining // 2)) + " "
         final_text += text
-        final_text += ' ' + self._format_profile.fmt(fill * (remaining // 2))
+        final_text += " " + self._format_profile.fmt(fill * (remaining // 2))
 
         return final_text
 
@@ -490,14 +487,13 @@ class _StatusHeader():
 #    format_profile (Profile): Formatting profile for formatting text
 #    elapsed (datetime): The offset into the session when this job is created
 #
-class _StatusJob():
-
+class _StatusJob:
     def __init__(self, context, action_name, full_name, content_profile, format_profile, elapsed):
         #
         # Public members
         #
-        self.action_name = action_name    # The action name
-        self.size = None                  # The number of characters required to render
+        self.action_name = action_name  # The action name
+        self.size = None  # The number of characters required to render
         self.full_name = full_name
 
         #
@@ -570,24 +566,26 @@ class _StatusJob():
     #    elapsed (datetime): The session elapsed time offset
     #
     def render(self, padding, elapsed):
-        text = self._format_profile.fmt('[') + \
-            self._time_code.render_time(elapsed - self._offset) + \
-            self._format_profile.fmt(']')
-
-        text += self._format_profile.fmt('[') + \
-            self._content_profile.fmt(self.action_name) + \
-            self._format_profile.fmt(':') + \
-            self._content_profile.fmt(self.full_name)
+        text = (
+            self._format_profile.fmt("[")
+            + self._time_code.render_time(elapsed - self._offset)
+            + self._format_profile.fmt("]")
+        )
+
+        text += (
+            self._format_profile.fmt("[")
+            + self._content_profile.fmt(self.action_name)
+            + self._format_profile.fmt(":")
+            + self._content_profile.fmt(self.full_name)
+        )
 
         if self._current_progress is not None:
-            text += self._format_profile.fmt(':') + \
-                self._content_profile.fmt(str(self._current_progress))
+            text += self._format_profile.fmt(":") + self._content_profile.fmt(str(self._current_progress))
             if self._maximum_progress is not None:
-                text += self._format_profile.fmt('/') + \
-                    self._content_profile.fmt(str(self._maximum_progress))
+                text += self._format_profile.fmt("/") + self._content_profile.fmt(str(self._maximum_progress))
 
         # Add padding before terminating ']'
-        terminator = (' ' * padding) + ']'
+        terminator = (" " * padding) + "]"
         text += self._format_profile.fmt(terminator)
 
         return text
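
Putting the pieces together, a rendered job line is an elapsed time code
followed by the action and element name, with any progress counters appended
and padding inserted before the closing bracket so that columns line up. An
illustrative rendering (names and timings made up):

    [00:01:23][build:core/base.bst:3/10      ]
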
diff --git a/src/buildstream/_frontend/widget.py b/src/buildstream/_frontend/widget.py
index 0a268b7..63fbfbb 100644
--- a/src/buildstream/_frontend/widget.py
+++ b/src/buildstream/_frontend/widget.py
@@ -45,8 +45,7 @@ ERROR_MESSAGES = [MessageType.FAIL, MessageType.ERROR, MessageType.BUG]
 #
 # An abstract class for printing output columns in our text UI.
 #
-class Widget():
-
+class Widget:
     def __init__(self, context, content_profile, format_profile):
 
         # The context
@@ -74,7 +73,6 @@ class Widget():
 
 # Used to add fixed text between columns
 class FixedText(Widget):
-
     def __init__(self, context, text, content_profile, format_profile):
         super().__init__(context, content_profile, format_profile)
         self.text = text
@@ -91,15 +89,13 @@ class WallclockTime(Widget):
 
     def render(self, message):
 
-        fields = [self.content_profile.fmt("{:02d}".format(x)) for x in
-                  [message.creation_time.hour,
-                   message.creation_time.minute,
-                   message.creation_time.second,
-                   ]
-                  ]
+        fields = [
+            self.content_profile.fmt("{:02d}".format(x))
+            for x in [message.creation_time.hour, message.creation_time.minute, message.creation_time.second,]
+        ]
         text = self.format_profile.fmt(":").join(fields)
 
-        if self._output_format == 'us':
+        if self._output_format == "us":
             text += self.content_profile.fmt(".{:06d}".format(message.creation_time.microsecond))
 
         return text
@@ -107,11 +103,10 @@ class WallclockTime(Widget):
 
 # A widget for rendering the debugging column
 class Debug(Widget):
-
     def render(self, message):
         element_name = "n/a" if message.element_name is None else message.element_name
 
-        text = self.format_profile.fmt('pid:')
+        text = self.format_profile.fmt("pid:")
         text += self.content_profile.fmt("{: <5}".format(message.pid))
         text += self.format_profile.fmt("element name:")
         text += self.content_profile.fmt("{: <30}".format(element_name))
@@ -130,19 +125,13 @@ class TimeCode(Widget):
 
     def render_time(self, elapsed):
         if elapsed is None:
-            fields = [
-                self.content_profile.fmt('--')
-                for i in range(3)
-            ]
+            fields = [self.content_profile.fmt("--") for i in range(3)]
         else:
             hours, remainder = divmod(int(elapsed.total_seconds()), 60 * 60)
             minutes, seconds = divmod(remainder, 60)
-            fields = [
-                self.content_profile.fmt("{0:02d}".format(field))
-                for field in [hours, minutes, seconds]
-            ]
+            fields = [self.content_profile.fmt("{0:02d}".format(field)) for field in [hours, minutes, seconds]]
 
-        text = self.format_profile.fmt(':').join(fields)
+        text = self.format_profile.fmt(":").join(fields)
 
         if self._microseconds:
             if elapsed is not None:
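
The divmod pair above is the usual way to split a total number of elapsed
seconds into hours, minutes and seconds; a quick worked example:

    elapsed_seconds = 3725  # 1h 2m 5s
    hours, remainder = divmod(elapsed_seconds, 60 * 60)  # 1, 125
    minutes, seconds = divmod(remainder, 60)             # 2, 5
    print("{0:02d}:{1:02d}:{2:02d}".format(hours, minutes, seconds))  # 01:02:05
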
@@ -169,41 +158,43 @@ class TypeName(Widget):
     }
 
     def render(self, message):
-        return self.content_profile.fmt("{: <7}"
-                                        .format(message.message_type.upper()),
-                                        bold=True, dim=True,
-                                        fg=self._action_colors[message.message_type])
+        return self.content_profile.fmt(
+            "{: <7}".format(message.message_type.upper()),
+            bold=True,
+            dim=True,
+            fg=self._action_colors[message.message_type],
+        )
 
 
 # A widget for displaying the Element name
 class ElementName(Widget):
-
     def render(self, message):
         action_name = message.action_name
         element_name = message.element_name
         if element_name is not None:
-            name = '{: <30}'.format(element_name)
+            name = "{: <30}".format(element_name)
         else:
-            name = 'core activity'
-            name = '{: <30}'.format(name)
+            name = "core activity"
+            name = "{: <30}".format(name)
 
         if not action_name:
             action_name = "Main"
 
-        return self.content_profile.fmt("{: >8}".format(action_name.lower())) + \
-            self.format_profile.fmt(':') + self.content_profile.fmt(name)
+        return (
+            self.content_profile.fmt("{: >8}".format(action_name.lower()))
+            + self.format_profile.fmt(":")
+            + self.content_profile.fmt(name)
+        )
 
 
 # A widget for displaying the primary message text
 class MessageText(Widget):
-
     def render(self, message):
         return message.message
 
 
 # A widget for formatting the element cache key
 class CacheKey(Widget):
-
     def __init__(self, context, content_profile, format_profile, err_profile):
         super().__init__(context, content_profile, format_profile)
 
@@ -216,10 +207,10 @@ class CacheKey(Widget):
             return ""
 
         if message.element_name is None:
-            return ' ' * self._key_length
+            return " " * self._key_length
 
         missing = False
-        key = ' ' * self._key_length
+        key = " " * self._key_length
         if message.element_key:
             _, key, missing = message.element_key
 
@@ -233,7 +224,6 @@ class CacheKey(Widget):
 
 # A widget for formatting the log file
 class LogFile(Widget):
-
     def __init__(self, context, content_profile, format_profile, err_profile):
         super().__init__(context, content_profile, format_profile)
 
@@ -248,7 +238,7 @@ class LogFile(Widget):
             logfile = message.logfile
 
             if abbrev and self._logdir != "" and logfile.startswith(self._logdir):
-                logfile = logfile[len(self._logdir):]
+                logfile = logfile[len(self._logdir) :]
                 logfile = logfile.lstrip(os.sep)
 
             if message.message_type in ERROR_MESSAGES:
@@ -256,7 +246,7 @@ class LogFile(Widget):
             else:
                 text = self.content_profile.fmt(logfile, dim=True)
         else:
-            text = ''
+            text = ""
 
         return text
 
@@ -273,8 +263,7 @@ class MessageOrLogFile(Widget):
 
     def render(self, message):
         # Show the log file only in the main start/success messages
-        if message.logfile and message.scheduler and \
-                message.message_type in [MessageType.START, MessageType.SUCCESS]:
+        if message.logfile and message.scheduler and message.message_type in [MessageType.START, MessageType.SUCCESS]:
             text = self._logfile_widget.render(message)
         else:
             text = self._message_widget.render(message)
@@ -296,14 +285,9 @@ class MessageOrLogFile(Widget):
 #    indent (int): Number of spaces to use for general indentation
 #
 class LogLine(Widget):
-
-    def __init__(self, context, state,
-                 content_profile,
-                 format_profile,
-                 success_profile,
-                 err_profile,
-                 detail_profile,
-                 indent=4):
+    def __init__(
+        self, context, state, content_profile, format_profile, success_profile, err_profile, detail_profile, indent=4
+    ):
         super().__init__(context, content_profile, format_profile)
 
         self._columns = []
@@ -311,7 +295,7 @@ class LogLine(Widget):
         self._success_profile = success_profile
         self._err_profile = err_profile
         self._detail_profile = detail_profile
-        self._indent = ' ' * indent
+        self._indent = " " * indent
         self._log_lines = context.log_error_lines
         self._message_lines = context.log_message_lines
         self._resolved_keys = None
@@ -320,19 +304,17 @@ class LogLine(Widget):
         self._logfile_widget = LogFile(context, content_profile, format_profile, err_profile)
 
         if context.log_debug:
-            self._columns.extend([
-                Debug(context, content_profile, format_profile)
-            ])
+            self._columns.extend([Debug(context, content_profile, format_profile)])
 
         self.logfile_variable_names = {
             "elapsed": TimeCode(context, content_profile, format_profile, microseconds=False),
             "elapsed-us": TimeCode(context, content_profile, format_profile, microseconds=True),
             "wallclock": WallclockTime(context, content_profile, format_profile),
-            "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format='us'),
+            "wallclock-us": WallclockTime(context, content_profile, format_profile, output_format="us"),
             "key": CacheKey(context, content_profile, format_profile, err_profile),
             "element": ElementName(context, content_profile, format_profile),
             "action": TypeName(context, content_profile, format_profile),
-            "message": MessageOrLogFile(context, content_profile, format_profile, err_profile)
+            "message": MessageOrLogFile(context, content_profile, format_profile, err_profile),
         }
         logfile_tokens = self._parse_logfile_format(context.log_message_format, content_profile, format_profile)
         self._columns.extend(logfile_tokens)
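
Each key in logfile_variable_names becomes a "%{...}" token that users can
combine in their log_message_format configuration. For illustration, a format
string built from the tokens defined above (the project default is not shown
in this diff, so this line is only an example):

    [%{elapsed}][%{key}][%{element}] %{action} %{message}
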
@@ -352,7 +334,7 @@ class LogLine(Widget):
     #    (str): The formatted list of elements
     #
     def show_pipeline(self, dependencies, format_):
-        report = ''
+        report = ""
         p = Profile()
 
         for element in dependencies:
@@ -360,57 +342,57 @@ class LogLine(Widget):
 
             full_key, cache_key, dim_keys = element._get_display_key()
 
-            line = p.fmt_subst(line, 'name', element._get_full_name(), fg='blue', bold=True)
-            line = p.fmt_subst(line, 'key', cache_key, fg='yellow', dim=dim_keys)
-            line = p.fmt_subst(line, 'full-key', full_key, fg='yellow', dim=dim_keys)
+            line = p.fmt_subst(line, "name", element._get_full_name(), fg="blue", bold=True)
+            line = p.fmt_subst(line, "key", cache_key, fg="yellow", dim=dim_keys)
+            line = p.fmt_subst(line, "full-key", full_key, fg="yellow", dim=dim_keys)
 
             consistency = element._get_consistency()
             if consistency == Consistency.INCONSISTENT:
-                line = p.fmt_subst(line, 'state', "no reference", fg='red')
+                line = p.fmt_subst(line, "state", "no reference", fg="red")
             else:
                 if element._cached_failure():
-                    line = p.fmt_subst(line, 'state', "failed", fg='red')
+                    line = p.fmt_subst(line, "state", "failed", fg="red")
                 elif element._cached_success():
-                    line = p.fmt_subst(line, 'state', "cached", fg='magenta')
+                    line = p.fmt_subst(line, "state", "cached", fg="magenta")
                 elif consistency == Consistency.RESOLVED and not element._source_cached():
-                    line = p.fmt_subst(line, 'state', "fetch needed", fg='red')
+                    line = p.fmt_subst(line, "state", "fetch needed", fg="red")
                 elif element._buildable():
-                    line = p.fmt_subst(line, 'state', "buildable", fg='green')
+                    line = p.fmt_subst(line, "state", "buildable", fg="green")
                 else:
-                    line = p.fmt_subst(line, 'state', "waiting", fg='blue')
+                    line = p.fmt_subst(line, "state", "waiting", fg="blue")
 
             # Element configuration
             if "%{config" in format_:
                 line = p.fmt_subst(
-                    line, 'config',
-                    yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True))
+                    line,
+                    "config",
+                    yaml.round_trip_dump(element._Element__config, default_flow_style=False, allow_unicode=True),
+                )
 
             # Variables
             if "%{vars" in format_:
                 variables = element._Element__variables.flat
                 line = p.fmt_subst(
-                    line, 'vars',
-                    yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True))
+                    line, "vars", yaml.round_trip_dump(variables, default_flow_style=False, allow_unicode=True)
+                )
 
             # Environment
             if "%{env" in format_:
                 environment = element._Element__environment
                 line = p.fmt_subst(
-                    line, 'env',
-                    yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
+                    line, "env", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True)
+                )
 
             # Public
             if "%{public" in format_:
                 environment = element._Element__public
                 line = p.fmt_subst(
-                    line, 'public',
-                    yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True))
+                    line, "public", yaml.round_trip_dump(environment, default_flow_style=False, allow_unicode=True)
+                )
 
             # Workspaced
             if "%{workspaced" in format_:
-                line = p.fmt_subst(
-                    line, 'workspaced',
-                    '(workspaced)' if element._get_workspace() else '', fg='yellow')
+                line = p.fmt_subst(line, "workspaced", "(workspaced)" if element._get_workspace() else "", fg="yellow")
 
             # Workspace-dirs
             if "%{workspace-dirs" in format_:
@@ -418,36 +400,31 @@ class LogLine(Widget):
                 if workspace is not None:
                     path = workspace.get_absolute_path()
                     if path.startswith("~/"):
-                        path = os.path.join(os.getenv('HOME', '/root'), path[2:])
-                    line = p.fmt_subst(line, 'workspace-dirs', "Workspace: {}".format(path))
+                        path = os.path.join(os.getenv("HOME", "/root"), path[2:])
+                    line = p.fmt_subst(line, "workspace-dirs", "Workspace: {}".format(path))
                 else:
-                    line = p.fmt_subst(
-                        line, 'workspace-dirs', '')
+                    line = p.fmt_subst(line, "workspace-dirs", "")
 
             # Dependencies
             if "%{deps" in format_:
                 deps = [e.name for e in element.dependencies(Scope.ALL, recurse=False)]
-                line = p.fmt_subst(
-                    line, 'deps',
-                    yaml.safe_dump(deps, default_style=None).rstrip('\n'))
+                line = p.fmt_subst(line, "deps", yaml.safe_dump(deps, default_style=None).rstrip("\n"))
 
             # Build Dependencies
             if "%{build-deps" in format_:
                 build_deps = [e.name for e in element.dependencies(Scope.BUILD, recurse=False)]
-                line = p.fmt_subst(
-                    line, 'build-deps',
-                    yaml.safe_dump(build_deps, default_style=False).rstrip('\n'))
+                line = p.fmt_subst(line, "build-deps", yaml.safe_dump(build_deps, default_style=False).rstrip("\n"))
 
             # Runtime Dependencies
             if "%{runtime-deps" in format_:
                 runtime_deps = [e.name for e in element.dependencies(Scope.RUN, recurse=False)]
                 line = p.fmt_subst(
-                    line, 'runtime-deps',
-                    yaml.safe_dump(runtime_deps, default_style=False).rstrip('\n'))
+                    line, "runtime-deps", yaml.safe_dump(runtime_deps, default_style=False).rstrip("\n")
+                )
 
-            report += line + '\n'
+            report += line + "\n"
 
-        return report.rstrip('\n')
+        return report.rstrip("\n")
 
     # print_heading()
     #
@@ -463,25 +440,24 @@ class LogLine(Widget):
     def print_heading(self, project, stream, *, log_file):
         context = self.context
         starttime = datetime.datetime.now()
-        text = ''
+        text = ""
 
         self._resolved_keys = {element: element._get_cache_key() for element in stream.session_elements}
 
         # Main invocation context
-        text += '\n'
+        text += "\n"
         text += self.content_profile.fmt("BuildStream Version {}\n".format(bst_version), bold=True)
         values = OrderedDict()
-        values["Session Start"] = starttime.strftime('%A, %d-%m-%Y at %H:%M:%S')
+        values["Session Start"] = starttime.strftime("%A, %d-%m-%Y at %H:%M:%S")
         values["Project"] = "{} ({})".format(project.name, project.directory)
         values["Targets"] = ", ".join([t.name for t in stream.targets])
         text += self._format_values(values)
 
         # User configurations
-        text += '\n'
+        text += "\n"
         text += self.content_profile.fmt("User Configuration\n", bold=True)
         values = OrderedDict()
-        values["Configuration File"] = \
-            "Default Configuration" if not context.config_origin else context.config_origin
+        values["Configuration File"] = "Default Configuration" if not context.config_origin else context.config_origin
         values["Cache Directory"] = context.cachedir
         values["Log Files"] = context.logdir
         values["Source Mirrors"] = context.sourcedir
@@ -492,7 +468,7 @@ class LogLine(Widget):
         values["Maximum Push Tasks"] = context.sched_pushers
         values["Maximum Network Retries"] = context.sched_network_retries
         text += self._format_values(values)
-        text += '\n'
+        text += "\n"
 
         # Project Options
         values = OrderedDict()
@@ -500,22 +476,25 @@ class LogLine(Widget):
         if values:
             text += self.content_profile.fmt("Project Options\n", bold=True)
             text += self._format_values(values)
-            text += '\n'
+            text += "\n"
 
         # Plugins
-        text += self._format_plugins(project.first_pass_config.element_factory.loaded_dependencies,
-                                     project.first_pass_config.source_factory.loaded_dependencies)
+        text += self._format_plugins(
+            project.first_pass_config.element_factory.loaded_dependencies,
+            project.first_pass_config.source_factory.loaded_dependencies,
+        )
         if project.config.element_factory and project.config.source_factory:
-            text += self._format_plugins(project.config.element_factory.loaded_dependencies,
-                                         project.config.source_factory.loaded_dependencies)
+            text += self._format_plugins(
+                project.config.element_factory.loaded_dependencies, project.config.source_factory.loaded_dependencies
+            )
 
         # Pipeline state
         text += self.content_profile.fmt("Pipeline\n", bold=True)
         text += self.show_pipeline(stream.total_elements, context.log_element_format)
-        text += '\n'
+        text += "\n"
 
         # Separator line before following output
-        text += self.format_profile.fmt("=" * 79 + '\n')
+        text += self.format_profile.fmt("=" * 79 + "\n")
 
         click.echo(text, nl=False, err=True)
         if log_file:
@@ -537,7 +516,7 @@ class LogLine(Widget):
         if not self._state.task_groups:
             return
 
-        text = ''
+        text = ""
 
         assert self._resolved_keys is not None
         elements = sorted(e for (e, k) in self._resolved_keys.items() if k != e._get_cache_key())
@@ -554,7 +533,7 @@ class LogLine(Widget):
                     # Exclude the failure messages if the job didn't ultimately fail
                     # (e.g. succeeded on retry)
                     if element_name in group.failed_tasks:
-                        values[element_name] = ''.join(self._render(v) for v in messages)
+                        values[element_name] = "".join(self._render(v) for v in messages)
 
             if values:
                 text += self.content_profile.fmt("Failure Summary\n", bold=True)
@@ -563,8 +542,8 @@ class LogLine(Widget):
         text += self.content_profile.fmt("Pipeline Summary\n", bold=True)
         values = OrderedDict()
 
-        values['Total'] = self.content_profile.fmt(str(len(stream.total_elements)))
-        values['Session'] = self.content_profile.fmt(str(len(stream.session_elements)))
+        values["Total"] = self.content_profile.fmt(str(len(stream.total_elements)))
+        values["Session"] = self.content_profile.fmt(str(len(stream.session_elements)))
 
         processed_maxlen = 1
         skipped_maxlen = 1
@@ -579,20 +558,25 @@ class LogLine(Widget):
             skipped = str(group.skipped_tasks)
             failed = str(len(group.failed_tasks))
 
-            processed_align = ' ' * (processed_maxlen - len(processed))
-            skipped_align = ' ' * (skipped_maxlen - len(skipped))
-            failed_align = ' ' * (failed_maxlen - len(failed))
-
-            status_text = self.content_profile.fmt("processed ") + \
-                self._success_profile.fmt(processed) + \
-                self.format_profile.fmt(', ') + processed_align
-
-            status_text += self.content_profile.fmt("skipped ") + \
-                self.content_profile.fmt(skipped) + \
-                self.format_profile.fmt(', ') + skipped_align
-
-            status_text += self.content_profile.fmt("failed ") + \
-                self._err_profile.fmt(failed) + ' ' + failed_align
+            processed_align = " " * (processed_maxlen - len(processed))
+            skipped_align = " " * (skipped_maxlen - len(skipped))
+            failed_align = " " * (failed_maxlen - len(failed))
+
+            status_text = (
+                self.content_profile.fmt("processed ")
+                + self._success_profile.fmt(processed)
+                + self.format_profile.fmt(", ")
+                + processed_align
+            )
+
+            status_text += (
+                self.content_profile.fmt("skipped ")
+                + self.content_profile.fmt(skipped)
+                + self.format_profile.fmt(", ")
+                + skipped_align
+            )
+
+            status_text += self.content_profile.fmt("failed ") + self._err_profile.fmt(failed) + " " + failed_align
             values["{} Queue".format(group.name)] = status_text
 
         text += self._format_values(values, style_value=False)
@@ -627,7 +611,7 @@ class LogLine(Widget):
             m = re.search(r"^%\{([^\}]+)\}", format_string)
             if m is not None:
                 variable = m.group(1)
-                format_string = format_string[m.end(0):]
+                format_string = format_string[m.end(0) :]
                 if variable not in self.logfile_variable_names:
                     raise Exception("'{0}' is not a valid log variable name.".format(variable))
                 logfile_tokens.append(self.logfile_variable_names[variable])
@@ -635,7 +619,7 @@ class LogLine(Widget):
                 m = re.search("^[^%]+", format_string)
                 if m is not None:
                     text = FixedText(self.context, m.group(0), content_profile, format_profile)
-                    format_string = format_string[m.end(0):]
+                    format_string = format_string[m.end(0) :]
                     logfile_tokens.append(text)
                 else:
                     # No idea what to do now
@@ -645,11 +629,11 @@ class LogLine(Widget):
     def _render(self, message):
 
         # Render the column widgets first
-        text = ''
+        text = ""
         for widget in self._columns:
             text += widget.render(message)
 
-        text += '\n'
+        text += "\n"
 
         extra_nl = False
 
@@ -664,51 +648,53 @@ class LogLine(Widget):
 
             n_lines = len(lines)
             abbrev = False
-            if message.message_type not in ERROR_MESSAGES \
-               and not frontend_message and n_lines > self._message_lines:
-                lines = lines[0:self._message_lines]
+            if message.message_type not in ERROR_MESSAGES and not frontend_message and n_lines > self._message_lines:
+                lines = lines[0 : self._message_lines]
                 if self._message_lines > 0:
                     abbrev = True
             else:
-                lines[n_lines - 1] = lines[n_lines - 1].rstrip('\n')
+                lines[n_lines - 1] = lines[n_lines - 1].rstrip("\n")
 
             detail = self._indent + self._indent.join(lines)
 
-            text += '\n'
+            text += "\n"
             if message.message_type in ERROR_MESSAGES:
                 text += self._err_profile.fmt(detail, bold=True)
             else:
                 text += self._detail_profile.fmt(detail)
 
             if abbrev:
-                text += self._indent + \
-                    self.content_profile.fmt('Message contains {} additional lines'
-                                             .format(n_lines - self._message_lines), dim=True)
-            text += '\n'
+                text += self._indent + self.content_profile.fmt(
+                    "Message contains {} additional lines".format(n_lines - self._message_lines), dim=True
+                )
+            text += "\n"
 
             extra_nl = True
 
         if message.scheduler and message.message_type == MessageType.FAIL:
-            text += '\n'
+            text += "\n"
 
             if self.context is not None and not self.context.log_verbose:
                 text += self._indent + self._err_profile.fmt("Log file: ")
-                text += self._indent + self._logfile_widget.render(message) + '\n'
+                text += self._indent + self._logfile_widget.render(message) + "\n"
             elif self._log_lines > 0:
-                text += self._indent + self._err_profile.fmt("Printing the last {} lines from log file:"
-                                                             .format(self._log_lines)) + '\n'
-                text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + '\n'
-                text += self._indent + self._err_profile.fmt("=" * 70) + '\n'
+                text += (
+                    self._indent
+                    + self._err_profile.fmt("Printing the last {} lines from log file:".format(self._log_lines))
+                    + "\n"
+                )
+                text += self._indent + self._logfile_widget.render_abbrev(message, abbrev=False) + "\n"
+                text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
 
                 log_content = self._read_last_lines(message.logfile)
                 log_content = textwrap.indent(log_content, self._indent)
                 text += self._detail_profile.fmt(log_content)
-                text += '\n'
-                text += self._indent + self._err_profile.fmt("=" * 70) + '\n'
+                text += "\n"
+                text += self._indent + self._err_profile.fmt("=" * 70) + "\n"
             extra_nl = True
 
         if extra_nl:
-            text += '\n'
+            text += "\n"
 
         return text
 
@@ -716,14 +702,14 @@ class LogLine(Widget):
         with ExitStack() as stack:
             # mmap handles low-level memory details, allowing for
             # faster searches
-            f = stack.enter_context(open(logfile, 'r+'))
+            f = stack.enter_context(open(logfile, "r+"))
             log = stack.enter_context(mmap(f.fileno(), os.path.getsize(f.name)))
 
             count = 0
             end = log.size() - 1
 
             while count < self._log_lines and end >= 0:
-                location = log.rfind(b'\n', 0, end)
+                location = log.rfind(b"\n", 0, end)
                 count += 1
 
                 # If location is -1 (none found), this will print the
@@ -735,8 +721,8 @@ class LogLine(Widget):
             # then we get the first character. If end is a newline position,
             # we discard it and only want to print the beginning of the next
             # line.
-            lines = log[(end + 1):].splitlines()
-            return '\n'.join([line.decode('utf-8') for line in lines]).rstrip()
+            lines = log[(end + 1) :].splitlines()
+            return "\n".join([line.decode("utf-8") for line in lines]).rstrip()
 
     def _format_plugins(self, element_plugins, source_plugins):
         text = ""
@@ -756,7 +742,7 @@ class LogLine(Widget):
             for plugin in source_plugins:
                 text += self.content_profile.fmt("    - {}\n".format(plugin))
 
-        text += '\n'
+        text += "\n"
 
         return text
 
@@ -773,23 +759,23 @@ class LogLine(Widget):
     #    (str): The formatted values
     #
     def _format_values(self, values, style_value=True):
-        text = ''
+        text = ""
         max_key_len = 0
         for key, value in values.items():
             max_key_len = max(len(key), max_key_len)
 
         for key, value in values.items():
-            if isinstance(value, str) and '\n' in value:
+            if isinstance(value, str) and "\n" in value:
                 text += self.format_profile.fmt("  {}:\n".format(key))
                 text += textwrap.indent(value, self._indent)
                 continue
 
-            text += self.format_profile.fmt("  {}: {}".format(key, ' ' * (max_key_len - len(key))))
+            text += self.format_profile.fmt("  {}: {}".format(key, " " * (max_key_len - len(key))))
             if style_value:
                 text += self.content_profile.fmt(str(value))
             else:
                 text += str(value)
-            text += '\n'
+            text += "\n"
 
         return text
 
@@ -806,20 +792,20 @@ class LogLine(Widget):
     #    (str): The formatted values
     #
     def _pretty_print_dictionary(self, values, long_=False, style_value=True):
-        text = ''
+        text = ""
         max_key_len = 0
         try:
             max_key_len = max(len(key) for key in values.keys())
         except ValueError:
-            text = ''
+            text = ""
 
         for key, value in values.items():
-            if isinstance(value, str) and '\n' in value:
+            if isinstance(value, str) and "\n" in value:
                 text += self.format_profile.fmt("  {}:".format(key))
                 text += textwrap.indent(value, self._indent)
                 continue
 
-            text += self.format_profile.fmt("  {}:{}".format(key, ' ' * (max_key_len - len(key))))
+            text += self.format_profile.fmt("  {}:{}".format(key, " " * (max_key_len - len(key))))
 
             value_list = "\n\t" + "\n\t".join((self._get_filestats(v, list_long=long_) for v in value))
             if value == []:
@@ -832,7 +818,7 @@ class LogLine(Widget):
                 text += self.content_profile.fmt(value_list)
             else:
                 text += value_list
-            text += '\n'
+            text += "\n"
 
         return text
 
@@ -854,22 +840,22 @@ class LogLine(Widget):
     #                              cached status of
     #
     def show_state_of_artifacts(self, targets):
-        report = ''
+        report = ""
         p = Profile()
         for element in targets:
-            line = '%{state: >12} %{name}'
-            line = p.fmt_subst(line, 'name', element.name, fg='yellow')
+            line = "%{state: >12} %{name}"
+            line = p.fmt_subst(line, "name", element.name, fg="yellow")
 
             if element._cached_success():
-                line = p.fmt_subst(line, 'state', "cached", fg='magenta')
+                line = p.fmt_subst(line, "state", "cached", fg="magenta")
             elif element._cached():
-                line = p.fmt_subst(line, 'state', "failed", fg='red')
+                line = p.fmt_subst(line, "state", "failed", fg="red")
             elif element._cached_remotely():
-                line = p.fmt_subst(line, 'state', "available", fg='green')
+                line = p.fmt_subst(line, "state", "available", fg="green")
             else:
-                line = p.fmt_subst(line, 'state', "not cached", fg='bright_red')
+                line = p.fmt_subst(line, "state", "not cached", fg="bright_red")
 
-            report += line + '\n'
+            report += line + "\n"
 
         return report
 
@@ -890,15 +876,27 @@ class LogLine(Widget):
             # Support files up to 99G, meaning maximum characters is 11
             max_v_len = 11
             if entry["type"] == _FileType.DIRECTORY:
-                return "drwxr-xr-x  dir    {}".format(entry["size"]) +\
-                       "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+                return (
+                    "drwxr-xr-x  dir    {}".format(entry["size"])
+                    + "{} ".format(" " * (max_v_len - len(size)))
+                    + "{}".format(entry["name"])
+                )
             elif entry["type"] == _FileType.SYMLINK:
-                return "lrwxrwxrwx  link   {}".format(entry["size"]) +\
-                       "{} ".format(' ' * (max_v_len - len(size))) + "{} -> {}".format(entry["name"], entry["target"])
+                return (
+                    "lrwxrwxrwx  link   {}".format(entry["size"])
+                    + "{} ".format(" " * (max_v_len - len(size)))
+                    + "{} -> {}".format(entry["name"], entry["target"])
+                )
             elif entry["executable"]:
-                return "-rwxr-xr-x  exe    {}".format(entry["size"]) +\
-                       "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+                return (
+                    "-rwxr-xr-x  exe    {}".format(entry["size"])
+                    + "{} ".format(" " * (max_v_len - len(size)))
+                    + "{}".format(entry["name"])
+                )
             else:
-                return "-rw-r--r--  reg    {}".format(entry["size"]) +\
-                       "{} ".format(' ' * (max_v_len - len(size))) + "{}".format(entry["name"])
+                return (
+                    "-rw-r--r--  reg    {}".format(entry["size"])
+                    + "{} ".format(" " * (max_v_len - len(size)))
+                    + "{}".format(entry["name"])
+                )
         return entry["name"]
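
The hunks above are mechanical rather than behavioural: Black normalizes
string quotes to double quotes, wraps expressions that exceed the line
length in parentheses with the operator leading each continuation line,
adds a trailing comma when it splits a call across lines, and puts spaces
around slice bounds that are expressions rather than plain names. A
minimal, self-contained sketch of the shapes those rules produce (the
names below are invented for illustration and are not taken from
BuildStream):

    def summarize(values):
        # Black rewrites 'text' as "text".
        report = ""
        for key, value in values.items():
            # Concatenations too long for one line wrap inside parentheses,
            # with the operator starting each continuation line.
            report += (
                "  "
                + key
                + ": "
                + str(value)
                + "\n"
            )
        # Slice bounds that are expressions gain spaces around the colon.
        return report[0 : len(report) - 1]

    print(summarize({"state": "cached", "name": "base.bst"}))
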
diff --git a/src/buildstream/_gitsourcebase.py b/src/buildstream/_gitsourcebase.py
index 120d8c72..4e9e591 100644
--- a/src/buildstream/_gitsourcebase.py
+++ b/src/buildstream/_gitsourcebase.py
@@ -35,7 +35,7 @@ from . import utils
 from .types import FastEnum
 from .utils import move_atomic, DirectoryExistsError
 
-GIT_MODULES = '.gitmodules'
+GIT_MODULES = ".gitmodules"
 
 # Warnings
 WARN_INCONSISTENT_SUBMODULE = "inconsistent-submodule"
@@ -53,7 +53,6 @@ class _RefFormat(FastEnum):
 # might have at a given time
 #
 class _GitMirror(SourceFetcher):
-
     def __init__(self, source, path, url, ref, *, primary=False, tags=[]):
 
         super().__init__()
@@ -80,59 +79,64 @@ class _GitMirror(SourceFetcher):
             # system configured tmpdir is not on the same partition.
             #
             with self.source.tempdir() as tmpdir:
-                url = self.source.translate_url(self.url, alias_override=alias_override,
-                                                primary=self.primary)
-                self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
-                                 fail="Failed to clone git repository {}".format(url),
-                                 fail_temporarily=True)
+                url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
+                self.source.call(
+                    [self.source.host_git, "clone", "--mirror", "-n", url, tmpdir],
+                    fail="Failed to clone git repository {}".format(url),
+                    fail_temporarily=True,
+                )
 
                 try:
                     move_atomic(tmpdir, self.mirror)
                 except DirectoryExistsError:
                     # Another process was quicker to download this repository.
                     # Let's discard our own
-                    self.source.status("{}: Discarding duplicate clone of {}"
-                                       .format(self.source, url))
+                    self.source.status("{}: Discarding duplicate clone of {}".format(self.source, url))
                 except OSError as e:
-                    raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}': {}"
-                                      .format(self.source, url, tmpdir, self.mirror, e)) from e
+                    raise SourceError(
+                        "{}: Failed to move cloned git repository {} from '{}' to '{}': {}".format(
+                            self.source, url, tmpdir, self.mirror, e
+                        )
+                    ) from e
 
     def _fetch(self, alias_override=None):
-        url = self.source.translate_url(self.url,
-                                        alias_override=alias_override,
-                                        primary=self.primary)
+        url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
 
         if alias_override:
             remote_name = utils.url_directory_name(alias_override)
             _, remotes = self.source.check_output(
-                [self.source.host_git, 'remote'],
+                [self.source.host_git, "remote"],
                 fail="Failed to retrieve list of remotes in {}".format(self.mirror),
-                cwd=self.mirror
+                cwd=self.mirror,
             )
             if remote_name not in remotes:
                 self.source.call(
-                    [self.source.host_git, 'remote', 'add', remote_name, url],
+                    [self.source.host_git, "remote", "add", remote_name, url],
                     fail="Failed to add remote {} with url {}".format(remote_name, url),
-                    cwd=self.mirror
+                    cwd=self.mirror,
                 )
         else:
             remote_name = "origin"
 
-        self.source.call([self.source.host_git, 'fetch', remote_name, '--prune',
-                          '+refs/heads/*:refs/heads/*', '+refs/tags/*:refs/tags/*'],
-                         fail="Failed to fetch from remote git repository: {}".format(url),
-                         fail_temporarily=True,
-                         cwd=self.mirror)
+        self.source.call(
+            [
+                self.source.host_git,
+                "fetch",
+                remote_name,
+                "--prune",
+                "+refs/heads/*:refs/heads/*",
+                "+refs/tags/*:refs/tags/*",
+            ],
+            fail="Failed to fetch from remote git repository: {}".format(url),
+            fail_temporarily=True,
+            cwd=self.mirror,
+        )
 
     def fetch(self, alias_override=None):  # pylint: disable=arguments-differ
         # Resolve the URL for the message
-        resolved_url = self.source.translate_url(self.url,
-                                                 alias_override=alias_override,
-                                                 primary=self.primary)
+        resolved_url = self.source.translate_url(self.url, alias_override=alias_override, primary=self.primary)
 
-        with self.source.timed_activity("Fetching from {}"
-                                        .format(resolved_url),
-                                        silent_nested=True):
+        with self.source.timed_activity("Fetching from {}".format(resolved_url), silent_nested=True):
             self.ensure(alias_override)
             if not self.has_ref():
                 self._fetch(alias_override)
@@ -147,48 +151,49 @@ class _GitMirror(SourceFetcher):
             return False
 
         # Check if the ref is really there
-        rc = self.source.call([self.source.host_git, 'cat-file', '-t', self.ref], cwd=self.mirror)
+        rc = self.source.call([self.source.host_git, "cat-file", "-t", self.ref], cwd=self.mirror)
         return rc == 0
 
     def assert_ref(self):
         if not self.has_ref():
-            raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
-                              .format(self.source, self.ref, self.url))
+            raise SourceError(
+                "{}: expected ref '{}' was not found in git repository: '{}'".format(self.source, self.ref, self.url)
+            )
 
     def latest_commit_with_tags(self, tracking, track_tags=False):
         _, output = self.source.check_output(
-            [self.source.host_git, 'rev-parse', tracking],
+            [self.source.host_git, "rev-parse", tracking],
             fail="Unable to find commit for specified branch name '{}'".format(tracking),
-            cwd=self.mirror)
-        ref = output.rstrip('\n')
+            cwd=self.mirror,
+        )
+        ref = output.rstrip("\n")
 
         if self.source.ref_format == _RefFormat.GIT_DESCRIBE:
             # Prefix the ref with the closest tag, if available,
             # to make the ref human readable
             exit_code, output = self.source.check_output(
-                [self.source.host_git, 'describe', '--tags', '--abbrev=40', '--long', ref],
-                cwd=self.mirror)
+                [self.source.host_git, "describe", "--tags", "--abbrev=40", "--long", ref], cwd=self.mirror
+            )
             if exit_code == 0:
-                ref = output.rstrip('\n')
+                ref = output.rstrip("\n")
 
         if not track_tags:
             return ref, []
 
         tags = set()
-        for options in [[], ['--first-parent'], ['--tags'], ['--tags', '--first-parent']]:
+        for options in [[], ["--first-parent"], ["--tags"], ["--tags", "--first-parent"]]:
             exit_code, output = self.source.check_output(
-                [self.source.host_git, 'describe', '--abbrev=0', ref, *options],
-                cwd=self.mirror)
+                [self.source.host_git, "describe", "--abbrev=0", ref, *options], cwd=self.mirror
+            )
             if exit_code == 0:
                 tag = output.strip()
                 _, commit_ref = self.source.check_output(
-                    [self.source.host_git, 'rev-parse', tag + '^{commit}'],
+                    [self.source.host_git, "rev-parse", tag + "^{commit}"],
                     fail="Unable to resolve tag '{}'".format(tag),
-                    cwd=self.mirror)
-                exit_code = self.source.call(
-                    [self.source.host_git, 'cat-file', 'tag', tag],
-                    cwd=self.mirror)
-                annotated = (exit_code == 0)
+                    cwd=self.mirror,
+                )
+                exit_code = self.source.call([self.source.host_git, "cat-file", "tag", tag], cwd=self.mirror)
+                annotated = exit_code == 0
 
                 tags.add((tag, commit_ref.strip(), annotated))
 
@@ -200,13 +205,17 @@ class _GitMirror(SourceFetcher):
         # Using --shared here avoids copying the objects into the checkout, in any
         # case we're just checking out a specific commit and then removing the .git/
         # directory.
-        self.source.call([self.source.host_git, 'clone', '--no-checkout', '--shared', self.mirror, fullpath],
-                         fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
-                         fail_temporarily=True)
-
-        self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
-                         fail="Failed to checkout git ref {}".format(self.ref),
-                         cwd=fullpath)
+        self.source.call(
+            [self.source.host_git, "clone", "--no-checkout", "--shared", self.mirror, fullpath],
+            fail="Failed to create git mirror {} in directory: {}".format(self.mirror, fullpath),
+            fail_temporarily=True,
+        )
+
+        self.source.call(
+            [self.source.host_git, "checkout", "--force", self.ref],
+            fail="Failed to checkout git ref {}".format(self.ref),
+            cwd=fullpath,
+        )
 
         # Remove .git dir
         shutil.rmtree(os.path.join(fullpath, ".git"))
@@ -217,34 +226,37 @@ class _GitMirror(SourceFetcher):
         fullpath = os.path.join(directory, self.path)
         url = self.source.translate_url(self.url)
 
-        self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
-                         fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
-                         fail_temporarily=True)
+        self.source.call(
+            [self.source.host_git, "clone", "--no-checkout", self.mirror, fullpath],
+            fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath),
+            fail_temporarily=True,
+        )
 
-        self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
-                         fail='Failed to add remote origin "{}"'.format(url),
-                         cwd=fullpath)
+        self.source.call(
+            [self.source.host_git, "remote", "set-url", "origin", url],
+            fail='Failed to add remote origin "{}"'.format(url),
+            cwd=fullpath,
+        )
 
-        self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
-                         fail="Failed to checkout git ref {}".format(self.ref),
-                         cwd=fullpath)
+        self.source.call(
+            [self.source.host_git, "checkout", "--force", self.ref],
+            fail="Failed to checkout git ref {}".format(self.ref),
+            cwd=fullpath,
+        )
 
     # List the submodules (path/url tuples) present at the given ref of this repo
     def submodule_list(self):
         modules = "{}:{}".format(self.ref, GIT_MODULES)
-        exit_code, output = self.source.check_output(
-            [self.source.host_git, 'show', modules], cwd=self.mirror)
+        exit_code, output = self.source.check_output([self.source.host_git, "show", modules], cwd=self.mirror)
 
         # If git show reports error code 128 here, we take it to mean there is
         # no .gitmodules file to display for the given revision.
         if exit_code == 128:
             return
         elif exit_code != 0:
-            raise SourceError(
-                "{plugin}: Failed to show gitmodules at ref {ref}".format(
-                    plugin=self, ref=self.ref))
+            raise SourceError("{plugin}: Failed to show gitmodules at ref {ref}".format(plugin=self, ref=self.ref))
 
-        content = '\n'.join([l.strip() for l in output.splitlines()])
+        content = "\n".join([l.strip() for l in output.splitlines()])
 
         io = StringIO(content)
         parser = RawConfigParser()
@@ -253,8 +265,8 @@ class _GitMirror(SourceFetcher):
         for section in parser.sections():
             # validate section name against the 'submodule "foo"' pattern
             if re.match(r'submodule "(.*)"', section):
-                path = parser.get(section, 'path')
-                url = parser.get(section, 'url')
+                path = parser.get(section, "path")
+                url = parser.get(section, "url")
 
                 yield (path, url)
 
@@ -266,31 +278,37 @@ class _GitMirror(SourceFetcher):
 
         # list objects in the parent repo tree to find the commit
         # object that corresponds to the submodule
-        _, output = self.source.check_output([self.source.host_git, 'ls-tree', ref, submodule],
-                                             fail="ls-tree failed for commit {} and submodule: {}".format(
-                                                 ref, submodule),
-                                             cwd=self.mirror)
+        _, output = self.source.check_output(
+            [self.source.host_git, "ls-tree", ref, submodule],
+            fail="ls-tree failed for commit {} and submodule: {}".format(ref, submodule),
+            cwd=self.mirror,
+        )
 
         # read the commit hash from the output
         fields = output.split()
-        if len(fields) >= 2 and fields[1] == 'commit':
+        if len(fields) >= 2 and fields[1] == "commit":
             submodule_commit = output.split()[2]
 
             # fail if the commit hash is invalid
             if len(submodule_commit) != 40:
-                raise SourceError("{}: Error reading commit information for submodule '{}'"
-                                  .format(self.source, submodule))
+                raise SourceError(
+                    "{}: Error reading commit information for submodule '{}'".format(self.source, submodule)
+                )
 
             return submodule_commit
 
         else:
-            detail = "The submodule '{}' is defined either in the BuildStream source\n".format(submodule) + \
-                     "definition, or in a .gitmodules file. But the submodule was never added to the\n" + \
-                     "underlying git repository with `git submodule add`."
+            detail = (
+                "The submodule '{}' is defined either in the BuildStream source\n".format(submodule)
+                + "definition, or in a .gitmodules file. But the submodule was never added to the\n"
+                + "underlying git repository with `git submodule add`."
+            )
 
-            self.source.warn("{}: Ignoring inconsistent submodule '{}'"
-                             .format(self.source, submodule), detail=detail,
-                             warning_token=WARN_INCONSISTENT_SUBMODULE)
+            self.source.warn(
+                "{}: Ignoring inconsistent submodule '{}'".format(self.source, submodule),
+                detail=detail,
+                warning_token=WARN_INCONSISTENT_SUBMODULE,
+            )
 
             return None
 
@@ -307,17 +325,24 @@ class _GitMirror(SourceFetcher):
                     # rev-list does not work in case of same rev
                     shallow.add(self.ref)
                 else:
-                    _, out = self.source.check_output([self.source.host_git, 'rev-list',
-                                                       '--ancestry-path', '--boundary',
-                                                       '{}..{}'.format(commit_ref, self.ref)],
-                                                      fail="Failed to get git history {}..{} in directory: {}"
-                                                      .format(commit_ref, self.ref, fullpath),
-                                                      fail_temporarily=True,
-                                                      cwd=self.mirror)
+                    _, out = self.source.check_output(
+                        [
+                            self.source.host_git,
+                            "rev-list",
+                            "--ancestry-path",
+                            "--boundary",
+                            "{}..{}".format(commit_ref, self.ref),
+                        ],
+                        fail="Failed to get git history {}..{} in directory: {}".format(
+                            commit_ref, self.ref, fullpath
+                        ),
+                        fail_temporarily=True,
+                        cwd=self.mirror,
+                    )
                     self.source.warn("refs {}..{}: {}".format(commit_ref, self.ref, out.splitlines()))
                     for line in out.splitlines():
-                        rev = line.lstrip('-')
-                        if line[0] == '-':
+                        rev = line.lstrip("-")
+                        if line[0] == "-":
                             shallow.add(rev)
                         else:
                             included.add(rev)
@@ -325,52 +350,64 @@ class _GitMirror(SourceFetcher):
             shallow -= included
             included |= shallow
 
-            self.source.call([self.source.host_git, 'init'],
-                             fail="Cannot initialize git repository: {}".format(fullpath),
-                             cwd=fullpath)
+            self.source.call(
+                [self.source.host_git, "init"],
+                fail="Cannot initialize git repository: {}".format(fullpath),
+                cwd=fullpath,
+            )
 
             for rev in included:
                 with TemporaryFile(dir=tmpdir) as commit_file:
-                    self.source.call([self.source.host_git, 'cat-file', 'commit', rev],
-                                     stdout=commit_file,
-                                     fail="Failed to get commit {}".format(rev),
-                                     cwd=self.mirror)
+                    self.source.call(
+                        [self.source.host_git, "cat-file", "commit", rev],
+                        stdout=commit_file,
+                        fail="Failed to get commit {}".format(rev),
+                        cwd=self.mirror,
+                    )
                     commit_file.seek(0, 0)
-                    self.source.call([self.source.host_git, 'hash-object', '-w', '-t', 'commit', '--stdin'],
-                                     stdin=commit_file,
-                                     fail="Failed to add commit object {}".format(rev),
-                                     cwd=fullpath)
-
-            with open(os.path.join(fullpath, '.git', 'shallow'), 'w') as shallow_file:
+                    self.source.call(
+                        [self.source.host_git, "hash-object", "-w", "-t", "commit", "--stdin"],
+                        stdin=commit_file,
+                        fail="Failed to add commit object {}".format(rev),
+                        cwd=fullpath,
+                    )
+
+            with open(os.path.join(fullpath, ".git", "shallow"), "w") as shallow_file:
                 for rev in shallow:
-                    shallow_file.write('{}\n'.format(rev))
+                    shallow_file.write("{}\n".format(rev))
 
             for tag, commit_ref, annotated in self.tags:
                 if annotated:
                     with TemporaryFile(dir=tmpdir) as tag_file:
-                        tag_data = 'object {}\ntype commit\ntag {}\n'.format(commit_ref, tag)
-                        tag_file.write(tag_data.encode('ascii'))
+                        tag_data = "object {}\ntype commit\ntag {}\n".format(commit_ref, tag)
+                        tag_file.write(tag_data.encode("ascii"))
                         tag_file.seek(0, 0)
                         _, tag_ref = self.source.check_output(
-                            [self.source.host_git, 'hash-object', '-w', '-t',
-                             'tag', '--stdin'],
+                            [self.source.host_git, "hash-object", "-w", "-t", "tag", "--stdin"],
                             stdin=tag_file,
                             fail="Failed to add tag object {}".format(tag),
-                            cwd=fullpath)
-
-                    self.source.call([self.source.host_git, 'tag', tag, tag_ref.strip()],
-                                     fail="Failed to tag: {}".format(tag),
-                                     cwd=fullpath)
+                            cwd=fullpath,
+                        )
+
+                    self.source.call(
+                        [self.source.host_git, "tag", tag, tag_ref.strip()],
+                        fail="Failed to tag: {}".format(tag),
+                        cwd=fullpath,
+                    )
                 else:
-                    self.source.call([self.source.host_git, 'tag', tag, commit_ref],
-                                     fail="Failed to tag: {}".format(tag),
-                                     cwd=fullpath)
+                    self.source.call(
+                        [self.source.host_git, "tag", tag, commit_ref],
+                        fail="Failed to tag: {}".format(tag),
+                        cwd=fullpath,
+                    )
 
-            with open(os.path.join(fullpath, '.git', 'HEAD'), 'w') as head:
-                self.source.call([self.source.host_git, 'rev-parse', self.ref],
-                                 stdout=head,
-                                 fail="Failed to parse commit {}".format(self.ref),
-                                 cwd=self.mirror)
+            with open(os.path.join(fullpath, ".git", "HEAD"), "w") as head:
+                self.source.call(
+                    [self.source.host_git, "rev-parse", self.ref],
+                    stdout=head,
+                    fail="Failed to parse commit {}".format(self.ref),
+                    cwd=self.mirror,
+                )
 
 
 class _GitSourceBase(Source):
@@ -382,58 +419,57 @@ class _GitSourceBase(Source):
     BST_MIRROR_CLASS = _GitMirror
 
     def configure(self, node):
-        ref = node.get_str('ref', None)
+        ref = node.get_str("ref", None)
 
-        config_keys = ['url', 'track', 'ref', 'submodules',
-                       'checkout-submodules', 'ref-format',
-                       'track-tags', 'tags']
+        config_keys = ["url", "track", "ref", "submodules", "checkout-submodules", "ref-format", "track-tags", "tags"]
         node.validate_keys(config_keys + Source.COMMON_CONFIG_KEYS)
 
-        tags_node = node.get_sequence('tags', [])
+        tags_node = node.get_sequence("tags", [])
         for tag_node in tags_node:
-            tag_node.validate_keys(['tag', 'commit', 'annotated'])
+            tag_node.validate_keys(["tag", "commit", "annotated"])
 
         tags = self._load_tags(node)
-        self.track_tags = node.get_bool('track-tags', default=False)
+        self.track_tags = node.get_bool("track-tags", default=False)
 
-        self.original_url = node.get_str('url')
-        self.mirror = self.BST_MIRROR_CLASS(self, '', self.original_url, ref, tags=tags, primary=True)
-        self.tracking = node.get_str('track', None)
+        self.original_url = node.get_str("url")
+        self.mirror = self.BST_MIRROR_CLASS(self, "", self.original_url, ref, tags=tags, primary=True)
+        self.tracking = node.get_str("track", None)
 
-        self.ref_format = node.get_enum('ref-format', _RefFormat, _RefFormat.SHA1)
+        self.ref_format = node.get_enum("ref-format", _RefFormat, _RefFormat.SHA1)
 
         # At this point we now know if the source has a ref and/or a track.
         # If it is missing both then we will be unable to track or build.
         if self.mirror.ref is None and self.tracking is None:
-            raise SourceError("{}: Git sources require a ref and/or track".format(self),
-                              reason="missing-track-and-ref")
+            raise SourceError(
+                "{}: Git sources require a ref and/or track".format(self), reason="missing-track-and-ref"
+            )
 
-        self.checkout_submodules = node.get_bool('checkout-submodules', default=True)
+        self.checkout_submodules = node.get_bool("checkout-submodules", default=True)
         self.submodules = []
 
         # Parse a dict of submodule overrides, stored in the submodule_overrides
         # and submodule_checkout_overrides dictionaries.
         self.submodule_overrides = {}
         self.submodule_checkout_overrides = {}
-        modules = node.get_mapping('submodules', {})
+        modules = node.get_mapping("submodules", {})
         for path in modules.keys():
             submodule = modules.get_mapping(path)
-            url = submodule.get_str('url', None)
+            url = submodule.get_str("url", None)
 
             # Make sure to mark all URLs that are specified in the configuration
             if url:
                 self.mark_download_url(url, primary=False)
 
             self.submodule_overrides[path] = url
-            if 'checkout' in submodule:
-                checkout = submodule.get_bool('checkout')
+            if "checkout" in submodule:
+                checkout = submodule.get_bool("checkout")
                 self.submodule_checkout_overrides[path] = checkout
 
         self.mark_download_url(self.original_url)
 
     def preflight(self):
         # Check if git is installed, get the binary at the same time
-        self.host_git = utils.get_host_tool('git')
+        self.host_git = utils.get_host_tool("git")
 
     def get_unique_key(self):
         # Here we want to encode the local name of the repository and
@@ -442,7 +478,7 @@ class _GitSourceBase(Source):
         key = [self.original_url, self.mirror.ref]
         if self.mirror.tags:
             tags = {tag: (commit, annotated) for tag, commit, annotated in self.mirror.tags}
-            key.append({'tags': tags})
+            key.append({"tags": tags})
 
         # Only modify the cache key with checkout_submodules if it's something
         # other than the default behaviour.
@@ -467,7 +503,7 @@ class _GitSourceBase(Source):
         return Consistency.INCONSISTENT
 
     def load_ref(self, node):
-        self.mirror.ref = node.get_str('ref', None)
+        self.mirror.ref = node.get_str("ref", None)
         self.mirror.tags = self._load_tags(node)
 
     def get_ref(self):
@@ -478,25 +514,23 @@ class _GitSourceBase(Source):
     def set_ref(self, ref, node):
         if not ref:
             self.mirror.ref = None
-            if 'ref' in node:
-                del node['ref']
+            if "ref" in node:
+                del node["ref"]
             self.mirror.tags = []
-            if 'tags' in node:
-                del node['tags']
+            if "tags" in node:
+                del node["tags"]
         else:
             actual_ref, tags = ref
-            node['ref'] = self.mirror.ref = actual_ref
+            node["ref"] = self.mirror.ref = actual_ref
             self.mirror.tags = tags
             if tags:
-                node['tags'] = []
+                node["tags"] = []
                 for tag, commit_ref, annotated in tags:
-                    data = {'tag': tag,
-                            'commit': commit_ref,
-                            'annotated': annotated}
-                    node['tags'].append(data)
+                    data = {"tag": tag, "commit": commit_ref, "annotated": annotated}
+                    node["tags"].append(data)
             else:
-                if 'tags' in node:
-                    del node['tags']
+                if "tags" in node:
+                    del node["tags"]
 
     def track(self):  # pylint: disable=arguments-differ
 
@@ -504,17 +538,13 @@ class _GitSourceBase(Source):
         if not self.tracking:
             # Is there a better way to check if a ref is given?
             if self.mirror.ref is None:
-                detail = 'Without a tracking branch ref can not be updated. Please ' + \
-                         'provide a ref or a track.'
-                raise SourceError("{}: No track or ref".format(self),
-                                  detail=detail, reason="track-attempt-no-track")
+                detail = "Without a tracking branch ref can not be updated. Please " + "provide a ref or a track."
+                raise SourceError("{}: No track or ref".format(self), detail=detail, reason="track-attempt-no-track")
             return None
 
         # Resolve the URL for the message
         resolved_url = self.translate_url(self.mirror.url)
-        with self.timed_activity("Tracking {} from {}"
-                                 .format(self.tracking, resolved_url),
-                                 silent_nested=True):
+        with self.timed_activity("Tracking {} from {}".format(self.tracking, resolved_url), silent_nested=True):
             self.mirror.ensure()
             self.mirror._fetch()
 
@@ -578,11 +608,12 @@ class _GitSourceBase(Source):
             for path, url in invalid_submodules:
                 detail.append("  Submodule URL '{}' at path '{}'".format(url, path))
 
-            self.warn("{}: Invalid submodules specified".format(self),
-                      warning_token=WARN_INVALID_SUBMODULE,
-                      detail="The following submodules are specified in the source "
-                      "description but do not exist according to the repository\n\n" +
-                      "\n".join(detail))
+            self.warn(
+                "{}: Invalid submodules specified".format(self),
+                warning_token=WARN_INVALID_SUBMODULE,
+                detail="The following submodules are specified in the source "
+                "description but do not exist according to the repository\n\n" + "\n".join(detail),
+            )
 
         # Warn about submodules which exist but have not been explicitly configured
         if unlisted_submodules:
@@ -590,37 +621,47 @@ class _GitSourceBase(Source):
             for path, url in unlisted_submodules:
                 detail.append("  Submodule URL '{}' at path '{}'".format(url, path))
 
-            self.warn("{}: Unlisted submodules exist".format(self),
-                      warning_token=WARN_UNLISTED_SUBMODULE,
-                      detail="The following submodules exist but are not specified " +
-                      "in the source description\n\n" +
-                      "\n".join(detail))
+            self.warn(
+                "{}: Unlisted submodules exist".format(self),
+                warning_token=WARN_UNLISTED_SUBMODULE,
+                detail="The following submodules exist but are not specified "
+                + "in the source description\n\n"
+                + "\n".join(detail),
+            )
 
         # Assert that the ref exists in the track tag/branch, if track has been specified.
         ref_in_track = False
         if self.tracking:
-            _, branch = self.check_output([self.host_git, 'branch', '--list', self.tracking,
-                                           '--contains', self.mirror.ref],
-                                          cwd=self.mirror.mirror)
+            _, branch = self.check_output(
+                [self.host_git, "branch", "--list", self.tracking, "--contains", self.mirror.ref],
+                cwd=self.mirror.mirror,
+            )
             if branch:
                 ref_in_track = True
             else:
-                _, tag = self.check_output([self.host_git, 'tag', '--list', self.tracking,
-                                            '--contains', self.mirror.ref],
-                                           cwd=self.mirror.mirror)
+                _, tag = self.check_output(
+                    [self.host_git, "tag", "--list", self.tracking, "--contains", self.mirror.ref],
+                    cwd=self.mirror.mirror,
+                )
                 if tag:
                     ref_in_track = True
 
             if not ref_in_track:
-                detail = "The ref provided for the element does not exist locally " + \
-                         "in the provided track branch / tag '{}'.\n".format(self.tracking) + \
-                         "You may wish to track the element to update the ref from '{}' ".format(self.tracking) + \
-                         "with `bst source track`,\n" + \
-                         "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
+                detail = (
+                    "The ref provided for the element does not exist locally "
+                    + "in the provided track branch / tag '{}'.\n".format(self.tracking)
+                    + "You may wish to track the element to update the ref from '{}' ".format(self.tracking)
+                    + "with `bst source track`,\n"
+                    + "or examine the upstream at '{}' for the specific ref.".format(self.mirror.url)
+                )
 
-                self.warn("{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n"
-                          .format(self, self.mirror.ref, self.tracking, self.mirror.url),
-                          detail=detail, warning_token=CoreWarnings.REF_NOT_IN_TRACK)
+                self.warn(
+                    "{}: expected ref '{}' was not found in given track '{}' for staged repository: '{}'\n".format(
+                        self, self.mirror.ref, self.tracking, self.mirror.url
+                    ),
+                    detail=detail,
+                    warning_token=CoreWarnings.REF_NOT_IN_TRACK,
+                )
 
     ###########################################################
     #                     Local Functions                     #
@@ -668,11 +709,11 @@ class _GitSourceBase(Source):
 
     def _load_tags(self, node):
         tags = []
-        tags_node = node.get_sequence('tags', [])
+        tags_node = node.get_sequence("tags", [])
         for tag_node in tags_node:
-            tag = tag_node.get_str('tag')
-            commit_ref = tag_node.get_str('commit')
-            annotated = tag_node.get_bool('annotated')
+            tag = tag_node.get_str("tag")
+            commit_ref = tag_node.get_str("commit")
+            annotated = tag_node.get_bool("annotated")
             tags.append((tag, commit_ref, annotated))
         return tags
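
As a point of reference for the tag-loading code above, the "tags" node that
_load_tags() walks maps onto plain data roughly as in the sketch below; this is
an illustration only (not part of this commit), and the tag name and commit
hash are hypothetical values.

    # Rough sketch of the node shape _load_tags() consumes, using plain
    # Python data in place of BuildStream's SequenceNode/MappingNode types.
    tags_node = [
        {"tag": "v1.2.0", "commit": "8d1b3f7", "annotated": True},  # hypothetical
    ]
    tags = [(t["tag"], t["commit"], t["annotated"]) for t in tags_node]
    # -> [("v1.2.0", "8d1b3f7", True)], the same tuples built above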
 
diff --git a/src/buildstream/_includes.py b/src/buildstream/_includes.py
index c04601b..bc0d771 100644
--- a/src/buildstream/_includes.py
+++ b/src/buildstream/_includes.py
@@ -14,7 +14,6 @@ from ._exceptions import LoadError, LoadErrorReason
 #                      provenance. Should be true if intended to be
 #                      serialized.
 class Includes:
-
     def __init__(self, loader, *, copy_tree=False):
         self._loader = loader
         self._loaded = {}
@@ -29,14 +28,11 @@ class Includes:
     #    included (set): Fail for recursion if trying to load any files in this set
     #    current_loader (Loader): Use alternative loader (for junction files)
     #    only_local (bool): Whether to ignore junction files
-    def process(self, node, *,
-                included=set(),
-                current_loader=None,
-                only_local=False):
+    def process(self, node, *, included=set(), current_loader=None, only_local=False):
         if current_loader is None:
             current_loader = self._loader
 
-        includes_node = node.get_node('(@)', allowed_types=[ScalarNode, SequenceNode], allow_none=True)
+        includes_node = node.get_node("(@)", allowed_types=[ScalarNode, SequenceNode], allow_none=True)
 
         if includes_node:
             if type(includes_node) is ScalarNode:  # pylint: disable=unidiomatic-typecheck
@@ -44,23 +40,24 @@ class Includes:
             else:
                 includes = includes_node.as_str_list()
 
-            del node['(@)']
+            del node["(@)"]
 
             for include in reversed(includes):
-                if only_local and ':' in include:
+                if only_local and ":" in include:
                     continue
                 try:
-                    include_node, file_path, sub_loader = self._include_file(include,
-                                                                             current_loader)
+                    include_node, file_path, sub_loader = self._include_file(include, current_loader)
                 except LoadError as e:
                     include_provenance = includes_node.get_provenance()
                     if e.reason == LoadErrorReason.MISSING_FILE:
                         message = "{}: Include block references a file that could not be found: '{}'.".format(
-                            include_provenance, include)
+                            include_provenance, include
+                        )
                         raise LoadError(message, LoadErrorReason.MISSING_FILE) from e
                     if e.reason == LoadErrorReason.LOADING_DIRECTORY:
                         message = "{}: Include block references a directory instead of a file: '{}'.".format(
-                            include_provenance, include)
+                            include_provenance, include
+                        )
                         raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY) from e
 
                     # Otherwise, we don't know the reason, so just raise
@@ -68,8 +65,10 @@ class Includes:
 
                 if file_path in included:
                     include_provenance = includes_node.get_provenance()
-                    raise LoadError("{}: trying to recursively include {}". format(include_provenance, file_path),
-                                    LoadErrorReason.RECURSIVE_INCLUDE)
+                    raise LoadError(
+                        "{}: trying to recursively include {}".format(include_provenance, file_path),
+                        LoadErrorReason.RECURSIVE_INCLUDE,
+                    )
                 # Because the included node will be modified, we need
                 # to copy it so that we do not modify the toplevel
                 # node of the provenance.
@@ -77,19 +76,14 @@ class Includes:
 
                 try:
                     included.add(file_path)
-                    self.process(include_node, included=included,
-                                 current_loader=sub_loader,
-                                 only_local=only_local)
+                    self.process(include_node, included=included, current_loader=sub_loader, only_local=only_local)
                 finally:
                     included.remove(file_path)
 
                 include_node._composite_under(node)
 
         for value in node.values():
-            self._process_value(value,
-                                included=included,
-                                current_loader=current_loader,
-                                only_local=only_local)
+            self._process_value(value, included=included, current_loader=current_loader, only_local=only_local)
 
     # _include_file()
     #
@@ -101,8 +95,8 @@ class Includes:
     #    loader (Loader): Loader for the current project.
     def _include_file(self, include, loader):
         shortname = include
-        if ':' in include:
-            junction, include = include.split(':', 1)
+        if ":" in include:
+            junction, include = include.split(":", 1)
             junction_loader = loader._get_loader(junction)
             current_loader = junction_loader
         else:
@@ -112,10 +106,7 @@ class Includes:
         file_path = os.path.join(directory, include)
         key = (current_loader, file_path)
         if key not in self._loaded:
-            self._loaded[key] = _yaml.load(file_path,
-                                           shortname=shortname,
-                                           project=project,
-                                           copy_tree=self._copy_tree)
+            self._loaded[key] = _yaml.load(file_path, shortname=shortname, project=project, copy_tree=self._copy_tree)
         return self._loaded[key], file_path, current_loader
 
     # _process_value()
@@ -127,20 +118,11 @@ class Includes:
     #    included (set): Fail for recursion if trying to load any files in this set
     #    current_loader (Loader): Use alternative loader (for junction files)
     #    only_local (bool): Whether to ignore junction files
-    def _process_value(self, value, *,
-                       included=set(),
-                       current_loader=None,
-                       only_local=False):
+    def _process_value(self, value, *, included=set(), current_loader=None, only_local=False):
         value_type = type(value)
 
         if value_type is MappingNode:
-            self.process(value,
-                         included=included,
-                         current_loader=current_loader,
-                         only_local=only_local)
+            self.process(value, included=included, current_loader=current_loader, only_local=only_local)
         elif value_type is SequenceNode:
             for v in value:
-                self._process_value(v,
-                                    included=included,
-                                    current_loader=current_loader,
-                                    only_local=only_local)
+                self._process_value(v, included=included, current_loader=current_loader, only_local=only_local)
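
The recursion guard in Includes.process() above, where a file is added to
'included' only for the duration of its own expansion, can be illustrated with
a standalone sketch; this is not BuildStream API, and the include graph and
function below are hypothetical.

    # Standalone sketch of the include recursion guard: a file that ends
    # up including itself, directly or through a cycle, is detected.
    INCLUDES = {"a.yml": ["b.yml"], "b.yml": ["a.yml"]}  # hypothetical cycle

    def expand(path, included=set()):
        if path in included:
            raise RuntimeError("trying to recursively include {}".format(path))
        included.add(path)
        try:
            for child in INCLUDES.get(path, []):
                expand(child, included)
        finally:
            included.remove(path)

    expand("a.yml")  # raises RuntimeError: a.yml -> b.yml -> a.yml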
diff --git a/src/buildstream/_loader/loader.py b/src/buildstream/_loader/loader.py
index e5859e9..da0c0fb 100644
--- a/src/buildstream/_loader/loader.py
+++ b/src/buildstream/_loader/loader.py
@@ -54,8 +54,7 @@ _NO_PROGRESS = object()
 #    fetch_subprojects (callable): A function to fetch subprojects
 #    parent (Loader): A parent Loader object, in the case this is a junctioned Loader
 #
-class Loader():
-
+class Loader:
     def __init__(self, context, project, *, fetch_subprojects, parent=None):
 
         # Ensure we have an absolute path for the base directory
@@ -66,22 +65,22 @@ class Loader():
         #
         # Public members
         #
-        self.project = project   # The associated Project
-        self.loaded = None       # The number of loaded Elements
+        self.project = project  # The associated Project
+        self.loaded = None  # The number of loaded Elements
 
         #
         # Private members
         #
         self._context = context
-        self._options = project.options      # Project options (OptionPool)
-        self._basedir = basedir              # Base project directory
+        self._options = project.options  # Project options (OptionPool)
+        self._basedir = basedir  # Base project directory
         self._first_pass_options = project.first_pass_config.options  # Project options (OptionPool)
-        self._parent = parent                # The parent loader
+        self._parent = parent  # The parent loader
         self._fetch_subprojects = fetch_subprojects
 
         self._meta_elements = {}  # Dict of resolved meta elements by name
-        self._elements = {}       # Dict of elements
-        self._loaders = {}        # Dict of junction loaders
+        self._elements = {}  # Dict of elements
+        self._loaders = {}  # Dict of junction loaders
 
         self._includes = Includes(self, copy_tree=True)
 
@@ -105,9 +104,11 @@ class Loader():
             if os.path.isabs(filename):
                 # XXX Should this just be an assertion ?
                 # Expect that the caller gives us the right thing at least ?
-                raise LoadError("Target '{}' was not specified as a relative "
-                                "path to the base project directory: {}"
-                                .format(filename, self._basedir), LoadErrorReason.INVALID_DATA)
+                raise LoadError(
+                    "Target '{}' was not specified as a relative "
+                    "path to the base project directory: {}".format(filename, self._basedir),
+                    LoadErrorReason.INVALID_DATA,
+                )
 
         self._warn_invalid_elements(targets)
 
@@ -130,8 +131,7 @@ class Loader():
         dummy_target = LoadElement(Node.from_dict({}), "", self)
         # Pylint is not very happy with Cython and can't understand 'dependencies' is a list
         dummy_target.dependencies.extend(  # pylint: disable=no-member
-            Dependency(element, Symbol.RUNTIME, False)
-            for element in target_elements
+            Dependency(element, Symbol.RUNTIME, False) for element in target_elements
         )
 
         with PROFILER.profile(Topics.CIRCULAR_CHECK, "_".join(targets)):
@@ -180,12 +180,12 @@ class Loader():
         # too late. The only time that seems just right is here, when preparing
         # the child process' copy of the Loader.
         #
-        del state['_fetch_subprojects']
+        del state["_fetch_subprojects"]
 
         # Also there's no gain in pickling over the caches, and they might
         # contain things which are unpleasantly large or unable to pickle.
-        del state['_elements']
-        del state['_meta_elements']
+        del state["_elements"]
+        del state["_meta_elements"]
 
         return state
 
@@ -230,14 +230,14 @@ class Loader():
         # Load the data and process any conditional statements therein
         fullpath = os.path.join(self._basedir, filename)
         try:
-            node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable,
-                              project=self.project)
+            node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
         except LoadError as e:
             if e.reason == LoadErrorReason.MISSING_FILE:
 
                 if self.project.junction:
-                    message = "Could not find element '{}' in project referred to by junction element '{}'" \
-                              .format(filename, self.project.junction.name)
+                    message = "Could not find element '{}' in project referred to by junction element '{}'".format(
+                        filename, self.project.junction.name
+                    )
                 else:
                     message = "Could not find element '{}' in elements directory '{}'".format(filename, self._basedir)
 
@@ -262,8 +262,8 @@ class Loader():
                 if provenance:
                     message = "{}: {}".format(provenance, message)
                 detail = None
-                if os.path.exists(os.path.join(self._basedir, filename + '.bst')):
-                    element_name = filename + '.bst'
+                if os.path.exists(os.path.join(self._basedir, filename + ".bst")):
+                    element_name = filename + ".bst"
                     detail = "Did you mean '{}'?\n".format(element_name)
                 raise LoadError(message, LoadErrorReason.LOADING_DIRECTORY, detail=detail) from e
 
@@ -333,10 +333,9 @@ class Loader():
 
                 if dep.junction:
                     self._load_file(dep.junction, rewritable, ticker, dep.provenance)
-                    loader = self._get_loader(dep.junction,
-                                              rewritable=rewritable,
-                                              ticker=ticker,
-                                              provenance=dep.provenance)
+                    loader = self._get_loader(
+                        dep.junction, rewritable=rewritable, ticker=ticker, provenance=dep.provenance
+                    )
                     dep_element = loader._load_file(dep.name, rewritable, ticker, dep.provenance)
                 else:
                     dep_element = self._elements.get(dep.name)
@@ -350,14 +349,16 @@ class Loader():
                         loader_queue.append((dep_element, list(reversed(dep_deps)), []))
 
                         # Pylint is not very happy about Cython and can't understand 'node' is a 'MappingNode'
-                        if dep_element.node.get_str(Symbol.KIND) == 'junction':  # pylint: disable=no-member
-                            raise LoadError("{}: Cannot depend on junction" .format(dep.provenance),
-                                            LoadErrorReason.INVALID_DATA)
+                        if dep_element.node.get_str(Symbol.KIND) == "junction":  # pylint: disable=no-member
+                            raise LoadError(
+                                "{}: Cannot depend on junction".format(dep.provenance), LoadErrorReason.INVALID_DATA
+                            )
 
                 # All is well, push the dependency onto the LoadElement
                 # Pylint is not very happy with Cython and can't understand 'dependencies' is a list
                 current_element[0].dependencies.append(  # pylint: disable=no-member
-                    Dependency(dep_element, dep.dep_type, dep.strict))
+                    Dependency(dep_element, dep.dep_type, dep.strict)
+                )
             else:
                 # We do not have any more dependencies to load for this
                 # element on the queue, report any invalid dep names
@@ -397,12 +398,14 @@ class Loader():
                     # Create `chain`, the loop of element dependencies from this
                     # element back to itself, by trimming everything before this
                     # element from the sequence under consideration.
-                    chain = [element.full_name for element in sequence[sequence.index(element):]]
+                    chain = [element.full_name for element in sequence[sequence.index(element) :]]
                     chain.append(element.full_name)
-                    raise LoadError(("Circular dependency detected at element: {}\n" +
-                                     "Dependency chain: {}")
-                                    .format(element.full_name, " -> ".join(chain)),
-                                    LoadErrorReason.CIRCULAR_DEPENDENCY)
+                    raise LoadError(
+                        ("Circular dependency detected at element: {}\n" + "Dependency chain: {}").format(
+                            element.full_name, " -> ".join(chain)
+                        ),
+                        LoadErrorReason.CIRCULAR_DEPENDENCY,
+                    )
                 if element not in validated:
                     # We've not already validated this element, so let's
                     # descend into it to check it out
@@ -447,9 +450,9 @@ class Loader():
         workspace = self._context.get_workspaces().get_workspace(element.name)
         skip_workspace = True
         if workspace:
-            workspace_node = {'kind': 'workspace'}
-            workspace_node['path'] = workspace.get_absolute_path()
-            workspace_node['ref'] = str(workspace.to_dict().get('last_successful', 'ignored'))
+            workspace_node = {"kind": "workspace"}
+            workspace_node["path"] = workspace.get_absolute_path()
+            workspace_node["ref"] = str(workspace.to_dict().get("last_successful", "ignored"))
             node[Symbol.SOURCES] = [workspace_node]
             skip_workspace = False
 
@@ -457,7 +460,7 @@ class Loader():
         for index, source in enumerate(sources):
             kind = source.get_str(Symbol.KIND)
             # the workspace source plugin cannot be used unless the element is workspaced
-            if kind == 'workspace' and skip_workspace:
+            if kind == "workspace" and skip_workspace:
                 continue
 
             del source[Symbol.KIND]
@@ -469,15 +472,20 @@ class Loader():
             meta_source = MetaSource(element.name, index, element_kind, kind, source, directory)
             meta_sources.append(meta_source)
 
-        meta_element = MetaElement(self.project, element.name, element_kind,
-                                   elt_provenance, meta_sources,
-                                   node.get_mapping(Symbol.CONFIG, default={}),
-                                   node.get_mapping(Symbol.VARIABLES, default={}),
-                                   node.get_mapping(Symbol.ENVIRONMENT, default={}),
-                                   node.get_str_list(Symbol.ENV_NOCACHE, default=[]),
-                                   node.get_mapping(Symbol.PUBLIC, default={}),
-                                   node.get_mapping(Symbol.SANDBOX, default={}),
-                                   element_kind == 'junction')
+        meta_element = MetaElement(
+            self.project,
+            element.name,
+            element_kind,
+            elt_provenance,
+            meta_sources,
+            node.get_mapping(Symbol.CONFIG, default={}),
+            node.get_mapping(Symbol.VARIABLES, default={}),
+            node.get_mapping(Symbol.ENVIRONMENT, default={}),
+            node.get_str_list(Symbol.ENV_NOCACHE, default=[]),
+            node.get_mapping(Symbol.PUBLIC, default={}),
+            node.get_mapping(Symbol.SANDBOX, default={}),
+            element_kind == "junction",
+        )
 
         # Cache it now, make sure it's already there before recursing
         self._meta_elements[element.name] = meta_element
@@ -522,9 +530,9 @@ class Loader():
                 else:
                     meta_dep = loader._meta_elements[name]
 
-                if dep.dep_type != 'runtime':
+                if dep.dep_type != "runtime":
                     meta_element.build_dependencies.append(meta_dep)
-                if dep.dep_type != 'build':
+                if dep.dep_type != "build":
                     meta_element.dependencies.append(meta_dep)
                 if dep.strict:
                     meta_element.strict_dependencies.append(meta_dep)
@@ -543,8 +551,7 @@ class Loader():
     # Raises: LoadError
     #
     # Returns: A Loader or None if specified junction does not exist
-    def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0,
-                    provenance=None):
+    def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, provenance=None):
 
         provenance_str = ""
         if provenance is not None:
@@ -557,17 +564,21 @@ class Loader():
             if loader is None:
                 # do not allow junctions with the same name in different
                 # subprojects
-                raise LoadError("{}Conflicting junction {} in subprojects, define junction in {}"
-                                .format(provenance_str, filename, self.project.name),
-                                LoadErrorReason.CONFLICTING_JUNCTION)
+                raise LoadError(
+                    "{}Conflicting junction {} in subprojects, define junction in {}".format(
+                        provenance_str, filename, self.project.name
+                    ),
+                    LoadErrorReason.CONFLICTING_JUNCTION,
+                )
 
             return loader
 
         if self._parent:
             # junctions in the parent take precedence over junctions defined
             # in subprojects
-            loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker,
-                                              level=level + 1, provenance=provenance)
+            loader = self._parent._get_loader(
+                filename, rewritable=rewritable, ticker=ticker, level=level + 1, provenance=provenance
+            )
             if loader:
                 self._loaders[filename] = loader
                 return loader
@@ -599,10 +610,11 @@ class Loader():
         # Any task counting *inside* the junction will be handled by
         # its loader.
         meta_element = self._collect_element_no_deps(self._elements[filename], _NO_PROGRESS)
-        if meta_element.kind != 'junction':
-            raise LoadError("{}{}: Expected junction but element kind is {}"
-                            .format(provenance_str, filename, meta_element.kind),
-                            LoadErrorReason.INVALID_DATA)
+        if meta_element.kind != "junction":
+            raise LoadError(
+                "{}{}: Expected junction but element kind is {}".format(provenance_str, filename, meta_element.kind),
+                LoadErrorReason.INVALID_DATA,
+            )
 
         # We check that junctions have no dependencies a little
         # early. This is cheating, since we don't technically know
@@ -618,9 +630,7 @@ class Loader():
         # would be nice if this could be done for *all* element types,
         # but since we haven't loaded those yet that's impossible.
         if self._elements[filename].dependencies:
-            raise LoadError(
-                "Dependencies are forbidden for 'junction' elements",
-                LoadErrorReason.INVALID_JUNCTION)
+            raise LoadError("Dependencies are forbidden for 'junction' elements", LoadErrorReason.INVALID_JUNCTION)
 
         element = Element._new_from_meta(meta_element)
         element._update_state()
@@ -628,10 +638,12 @@ class Loader():
         # If this junction element points to a sub-sub-project, we need to
         # find loader for that project.
         if element.target:
-            subproject_loader = self._get_loader(element.target_junction, rewritable=rewritable, ticker=ticker,
-                                                 level=level, provenance=provenance)
-            loader = subproject_loader._get_loader(element.target_element, rewritable=rewritable, ticker=ticker,
-                                                   level=level, provenance=provenance)
+            subproject_loader = self._get_loader(
+                element.target_junction, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance
+            )
+            loader = subproject_loader._get_loader(
+                element.target_element, rewritable=rewritable, ticker=ticker, level=level, provenance=provenance
+            )
             self._loaders[filename] = loader
             return loader
 
@@ -639,15 +651,18 @@ class Loader():
         #
         if element._get_consistency() >= Consistency.RESOLVED and not element._source_cached():
             if ticker:
-                ticker(filename, 'Fetching subproject')
+                ticker(filename, "Fetching subproject")
             self._fetch_subprojects([element])
 
         # Handle the case where a subproject has no ref
         #
         elif element._get_consistency() == Consistency.INCONSISTENT:
             detail = "Try tracking the junction element with `bst source track {}`".format(filename)
-            raise LoadError("{}Subproject has no ref for junction: {}".format(provenance_str, filename),
-                            LoadErrorReason.SUBPROJECT_INCONSISTENT, detail=detail)
+            raise LoadError(
+                "{}Subproject has no ref for junction: {}".format(provenance_str, filename),
+                LoadErrorReason.SUBPROJECT_INCONSISTENT,
+                detail=detail,
+            )
 
         sources = list(element.sources())
         if len(sources) == 1 and sources[0]._get_local_path():
@@ -656,8 +671,9 @@ class Loader():
         else:
             # Stage sources
             element._set_required()
-            basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
-                                   filename, element._get_cache_key())
+            basedir = os.path.join(
+                self.project.directory, ".bst", "staged-junctions", filename, element._get_cache_key()
+            )
             if not os.path.exists(basedir):
                 os.makedirs(basedir, exist_ok=True)
                 element._stage_sources_at(basedir)
@@ -666,9 +682,15 @@ class Loader():
         project_dir = os.path.join(basedir, element.path)
         try:
             from .._project import Project  # pylint: disable=cyclic-import
-            project = Project(project_dir, self._context, junction=element,
-                              parent_loader=self, search_for_project=False,
-                              fetch_subprojects=self._fetch_subprojects)
+
+            project = Project(
+                project_dir,
+                self._context,
+                junction=element,
+                parent_loader=self,
+                search_for_project=False,
+                fetch_subprojects=self._fetch_subprojects,
+            )
         except LoadError as e:
             if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
                 message = (
@@ -706,7 +728,7 @@ class Loader():
         # We allow to split only once since deep junctions names are forbidden.
         # Users who want to refer to elements in sub-sub-projects are required
         # to create junctions on the top level project.
-        junction_path = name.rsplit(':', 1)
+        junction_path = name.rsplit(":", 1)
         if len(junction_path) == 1:
             return None, junction_path[-1], self
         else:
@@ -760,11 +782,17 @@ class Loader():
                 invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME].append(filename)
 
         if invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]:
-            self._warn("Target elements '{}' do not have expected file extension `.bst` "
-                       "Improperly named elements will not be discoverable by commands"
-                       .format(invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]),
-                       warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX)
+            self._warn(
+                "Target elements '{}' do not have expected file extension `.bst` "
+                "Improperly named elements will not be discoverable by commands".format(
+                    invalid_elements[CoreWarnings.BAD_ELEMENT_SUFFIX]
+                ),
+                warning_token=CoreWarnings.BAD_ELEMENT_SUFFIX,
+            )
         if invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]:
-            self._warn("Target elements '{}' have invalid characerts in their name."
-                       .format(invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]),
-                       warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME)
+            self._warn(
+                "Target elements '{}' have invalid characerts in their name.".format(
+                    invalid_elements[CoreWarnings.BAD_CHARACTERS_IN_NAME]
+                ),
+                warning_token=CoreWarnings.BAD_CHARACTERS_IN_NAME,
+            )
diff --git a/src/buildstream/_loader/metaelement.py b/src/buildstream/_loader/metaelement.py
index 00d8560..97b0de2 100644
--- a/src/buildstream/_loader/metaelement.py
+++ b/src/buildstream/_loader/metaelement.py
@@ -20,7 +20,7 @@
 from ..node import Node
 
 
-class MetaElement():
+class MetaElement:
 
     # MetaElement()
     #
@@ -40,9 +40,21 @@ class MetaElement():
     #    sandbox: Configuration specific to the sandbox environment
     #    first_pass: The element is to be loaded with first pass configuration (junction)
     #
-    def __init__(self, project, name, kind=None, provenance=None, sources=None, config=None,
-                 variables=None, environment=None, env_nocache=None, public=None,
-                 sandbox=None, first_pass=False):
+    def __init__(
+        self,
+        project,
+        name,
+        kind=None,
+        provenance=None,
+        sources=None,
+        config=None,
+        variables=None,
+        environment=None,
+        env_nocache=None,
+        public=None,
+        sandbox=None,
+        first_pass=False,
+    ):
         self.project = project
         self.name = name
         self.kind = kind
diff --git a/src/buildstream/_loader/metasource.py b/src/buildstream/_loader/metasource.py
index da2c0e2..5466d3a 100644
--- a/src/buildstream/_loader/metasource.py
+++ b/src/buildstream/_loader/metasource.py
@@ -18,7 +18,7 @@
 #        Tristan Van Berkom <tr...@codethink.co.uk>
 
 
-class MetaSource():
+class MetaSource:
 
     # MetaSource()
     #
diff --git a/src/buildstream/_message.py b/src/buildstream/_message.py
index f4f342a..a2844dd 100644
--- a/src/buildstream/_message.py
+++ b/src/buildstream/_message.py
@@ -23,57 +23,55 @@ import os
 
 # Types of status messages.
 #
-class MessageType():
-    DEBUG = "debug"        # Debugging message
-    STATUS = "status"      # Status message, verbose details
-    INFO = "info"          # Informative messages
-    WARN = "warning"       # Warning messages
-    ERROR = "error"        # Error messages
-    BUG = "bug"            # An unhandled exception was raised in a plugin
-    LOG = "log"            # Messages for log files _only_, never in the frontend
+class MessageType:
+    DEBUG = "debug"  # Debugging message
+    STATUS = "status"  # Status message, verbose details
+    INFO = "info"  # Informative messages
+    WARN = "warning"  # Warning messages
+    ERROR = "error"  # Error messages
+    BUG = "bug"  # An unhandled exception was raised in a plugin
+    LOG = "log"  # Messages for log files _only_, never in the frontend
 
     # Timed Messages: SUCCESS and FAIL have duration timestamps
-    START = "start"        # Status start message
-    SUCCESS = "success"    # Successful status complete message
-    FAIL = "failure"       # Failing status complete message
+    START = "start"  # Status start message
+    SUCCESS = "success"  # Successful status complete message
+    FAIL = "failure"  # Failing status complete message
     SKIPPED = "skipped"
 
 
 # Messages which should be reported regardless of whether
 # they are currently silenced or not
-unconditional_messages = [
-    MessageType.INFO,
-    MessageType.WARN,
-    MessageType.FAIL,
-    MessageType.ERROR,
-    MessageType.BUG
-]
+unconditional_messages = [MessageType.INFO, MessageType.WARN, MessageType.FAIL, MessageType.ERROR, MessageType.BUG]
 
 
 # Message object
 #
-class Message():
-
-    def __init__(self, message_type, message, *,
-                 element_name=None,
-                 element_key=None,
-                 detail=None,
-                 action_name=None,
-                 elapsed=None,
-                 logfile=None,
-                 sandbox=False,
-                 scheduler=False):
+class Message:
+    def __init__(
+        self,
+        message_type,
+        message,
+        *,
+        element_name=None,
+        element_key=None,
+        detail=None,
+        action_name=None,
+        elapsed=None,
+        logfile=None,
+        sandbox=False,
+        scheduler=False
+    ):
         self.message_type = message_type  # Message type
-        self.message = message            # The message string
-        self.element_name = element_name   # The instance element name of the issuing plugin
-        self.element_key = element_key    # The display key of the issuing plugin element
-        self.detail = detail              # An additional detail string
-        self.action_name = action_name    # Name of the task queue (fetch, refresh, build, etc)
-        self.elapsed = elapsed            # The elapsed time, in timed messages
-        self.logfile = logfile            # The log file path where commands took place
-        self.sandbox = sandbox            # Whether the error that caused this message used a sandbox
-        self.pid = os.getpid()            # The process pid
-        self.scheduler = scheduler        # Whether this is a scheduler level message
+        self.message = message  # The message string
+        self.element_name = element_name  # The instance element name of the issuing plugin
+        self.element_key = element_key  # The display key of the issuing plugin element
+        self.detail = detail  # An additional detail string
+        self.action_name = action_name  # Name of the task queue (fetch, refresh, build, etc)
+        self.elapsed = elapsed  # The elapsed time, in timed messages
+        self.logfile = logfile  # The log file path where commands took place
+        self.sandbox = sandbox  # Whether the error that caused this message used a sandbox
+        self.pid = os.getpid()  # The process pid
+        self.scheduler = scheduler  # Whether this is a scheduler level message
         self.creation_time = datetime.datetime.now()
         if message_type in (MessageType.SUCCESS, MessageType.FAIL):
             assert elapsed is not None
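
For orientation, the keyword-only constructor above can be exercised as in the
following sketch; the message text and element name are hypothetical, and
'elapsed' is passed because SUCCESS and FAIL are timed message types (see the
assertion above).

    # Sketch of constructing a timed Message with the signature shown in
    # this diff; "hello.bst" is a hypothetical element name.
    import datetime
    from buildstream._message import Message, MessageType

    msg = Message(
        MessageType.SUCCESS,
        "Build completed",
        element_name="hello.bst",
        elapsed=datetime.timedelta(minutes=1),  # required for SUCCESS/FAIL
    )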
diff --git a/src/buildstream/_messenger.py b/src/buildstream/_messenger.py
index 20c3277..03b2833 100644
--- a/src/buildstream/_messenger.py
+++ b/src/buildstream/_messenger.py
@@ -39,15 +39,14 @@ if "BST_TEST_SUITE" in os.environ:
 
 # TimeData class to contain times in an object that can be passed around
 # and updated from different places
-class _TimeData():
-    __slots__ = ['start_time']
+class _TimeData:
+    __slots__ = ["start_time"]
 
     def __init__(self, start_time):
         self.start_time = start_time
 
 
-class Messenger():
-
+class Messenger:
     def __init__(self):
         self._message_handler = None
         self._silence_scope_depth = 0
@@ -238,8 +237,9 @@ class Messenger():
                     detail = "{} of {} subtasks processed".format(task.current_progress, task.maximum_progress)
                 else:
                     detail = "{} subtasks processed".format(task.current_progress)
-            message = Message(MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail,
-                              element_name=element_name)
+            message = Message(
+                MessageType.SUCCESS, activity_name, elapsed=elapsed, detail=detail, element_name=element_name
+            )
             self.message(message)
 
     # recorded_messages()
@@ -274,14 +274,13 @@ class Messenger():
 
         # Create the fully qualified logfile in the log directory,
         # appending the pid and .log extension at the end.
-        self._log_filename = os.path.join(logdir,
-                                          '{}.{}.log'.format(filename, os.getpid()))
+        self._log_filename = os.path.join(logdir, "{}.{}.log".format(filename, os.getpid()))
 
         # Ensure the directory exists first
         directory = os.path.dirname(self._log_filename)
         os.makedirs(directory, exist_ok=True)
 
-        with open(self._log_filename, 'a') as logfile:
+        with open(self._log_filename, "a") as logfile:
 
             # Write one last line to the log and flush it to disk
             def flush_log():
@@ -291,7 +290,7 @@ class Messenger():
                 #
                 # So just try to flush as well as we can at SIGTERM time
                 try:
-                    logfile.write('\n\nForcefully terminated\n')
+                    logfile.write("\n\nForcefully terminated\n")
                     logfile.flush()
                 except RuntimeError:
                     os.fsync(logfile.fileno())
@@ -352,26 +351,28 @@ class Messenger():
 
         template += ": {message}"
 
-        detail = ''
+        detail = ""
         if message.detail is not None:
             template += "\n\n{detail}"
-            detail = message.detail.rstrip('\n')
+            detail = message.detail.rstrip("\n")
             detail = INDENT + INDENT.join(detail.splitlines(True))
 
         timecode = EMPTYTIME
         if message.message_type in (MessageType.SUCCESS, MessageType.FAIL):
-            hours, remainder = divmod(int(message.elapsed.total_seconds()), 60**2)
+            hours, remainder = divmod(int(message.elapsed.total_seconds()), 60 ** 2)
             minutes, seconds = divmod(remainder, 60)
             timecode = "{0:02d}:{1:02d}:{2:02d}".format(hours, minutes, seconds)
 
-        text = template.format(timecode=timecode,
-                               element_name=element_name,
-                               type=message.message_type.upper(),
-                               message=message.message,
-                               detail=detail)
+        text = template.format(
+            timecode=timecode,
+            element_name=element_name,
+            type=message.message_type.upper(),
+            message=message.message,
+            detail=detail,
+        )
 
         # Write to the open log file
-        self._log_handle.write('{}\n'.format(text))
+        self._log_handle.write("{}\n".format(text))
         self._log_handle.flush()
 
     # get_state_for_child_job_pickling(self)
@@ -399,21 +400,21 @@ class Messenger():
         # access to private details of Messenger, but it would open up a window
         # where messages wouldn't be handled as expected.
         #
-        del state['_message_handler']
+        del state["_message_handler"]
 
         # The render status callback is only used in the main process
         #
-        del state['_render_status_cb']
+        del state["_render_status_cb"]
 
         # The "simple_task" context manager is not needed outside the main
         # process. During testing we override it to something that cannot
         # pickle, so just drop it when pickling to a child job. Note that it
         # will only appear in 'state' if it has been overridden.
         #
-        state.pop('simple_task', None)
+        state.pop("simple_task", None)
 
         # The State object is not needed outside the main process
-        del state['_state']
+        del state["_state"]
 
         return state
 
diff --git a/src/buildstream/_options/option.py b/src/buildstream/_options/option.py
index 51017be..71d2f12 100644
--- a/src/buildstream/_options/option.py
+++ b/src/buildstream/_options/option.py
@@ -27,11 +27,7 @@ if TYPE_CHECKING:
 
 # Shared symbols for validation purposes
 #
-OPTION_SYMBOLS = [
-    'type',
-    'description',
-    'variable'
-]
+OPTION_SYMBOLS = ["type", "description", "variable"]
 
 
 # Option()
@@ -42,7 +38,7 @@ OPTION_SYMBOLS = [
 # the loaded project options is a collection of typed Option
 # instances.
 #
-class Option():
+class Option:
 
     # Subclasses use this to specify the type name used
     # for the yaml format and error messages
@@ -66,12 +62,12 @@ class Option():
     def load(self, node):
         # We don't use the description, but we do require that options have a
         # description.
-        node.get_str('description')
-        self.variable = node.get_str('variable', default=None)
+        node.get_str("description")
+        self.variable = node.get_str("variable", default=None)
 
         # Assert valid symbol name for variable name
         if self.variable is not None:
-            _assert_symbol_name(self.variable, 'variable name', ref_node=node.get_node('variable'))
+            _assert_symbol_name(self.variable, "variable name", ref_node=node.get_node("variable"))
 
     # load_value()
     #
diff --git a/src/buildstream/_options/optionarch.py b/src/buildstream/_options/optionarch.py
index cbe360f..2d663f0 100644
--- a/src/buildstream/_options/optionarch.py
+++ b/src/buildstream/_options/optionarch.py
@@ -36,7 +36,7 @@ from .optionenum import OptionEnum
 #
 class OptionArch(OptionEnum):
 
-    OPTION_TYPE = 'arch'
+    OPTION_TYPE = "arch"
 
     def load(self, node):
         super().load_special(node, allow_default_definition=False)
@@ -54,12 +54,14 @@ class OptionArch(OptionEnum):
                     # Do not terminate the loop early to ensure we validate
                     # all values in the list.
             except PlatformError as e:
-                provenance = node.get_sequence('values').scalar_at(index).get_provenance()
+                provenance = node.get_sequence("values").scalar_at(index).get_provenance()
                 prefix = ""
                 if provenance:
                     prefix = "{}: ".format(provenance)
-                raise LoadError("{}Invalid value for {} option '{}': {}"
-                                .format(prefix, self.OPTION_TYPE, self.name, e), LoadErrorReason.INVALID_DATA)
+                raise LoadError(
+                    "{}Invalid value for {} option '{}': {}".format(prefix, self.OPTION_TYPE, self.name, e),
+                    LoadErrorReason.INVALID_DATA,
+                )
 
         if default_value is None:
             # Host architecture is not supported by the project.
diff --git a/src/buildstream/_options/optionbool.py b/src/buildstream/_options/optionbool.py
index f91cb25..c7289b9 100644
--- a/src/buildstream/_options/optionbool.py
+++ b/src/buildstream/_options/optionbool.py
@@ -27,13 +27,13 @@ from .option import Option, OPTION_SYMBOLS
 #
 class OptionBool(Option):
 
-    OPTION_TYPE = 'bool'
+    OPTION_TYPE = "bool"
 
     def load(self, node):
 
         super().load(node)
-        node.validate_keys(OPTION_SYMBOLS + ['default'])
-        self.value = node.get_bool('default')
+        node.validate_keys(OPTION_SYMBOLS + ["default"])
+        self.value = node.get_bool("default")
 
     def load_value(self, node, *, transform=None):
         if transform:
@@ -42,13 +42,14 @@ class OptionBool(Option):
             self.value = node.get_bool(self.name)
 
     def set_value(self, value):
-        if value in ('True', 'true'):
+        if value in ("True", "true"):
             self.value = True
-        elif value in ('False', 'false'):
+        elif value in ("False", "false"):
             self.value = False
         else:
-            raise LoadError("Invalid value for boolean option {}: {}".format(self.name, value),
-                            LoadErrorReason.INVALID_DATA)
+            raise LoadError(
+                "Invalid value for boolean option {}: {}".format(self.name, value), LoadErrorReason.INVALID_DATA
+            )
 
     def get_value(self):
         if self.value:
diff --git a/src/buildstream/_options/optioneltmask.py b/src/buildstream/_options/optioneltmask.py
index 178999f..5a0d15f 100644
--- a/src/buildstream/_options/optioneltmask.py
+++ b/src/buildstream/_options/optioneltmask.py
@@ -28,7 +28,7 @@ from .optionflags import OptionFlags
 #
 class OptionEltMask(OptionFlags):
 
-    OPTION_TYPE = 'element-mask'
+    OPTION_TYPE = "element-mask"
 
     def load(self, node):
         # Ask the parent constructor to disallow value definitions,
@@ -41,6 +41,6 @@ class OptionEltMask(OptionFlags):
     def load_valid_values(self, node):
         values = []
         for filename in utils.list_relative_paths(self.pool.element_path):
-            if filename.endswith('.bst'):
+            if filename.endswith(".bst"):
                 values.append(filename)
         return values
diff --git a/src/buildstream/_options/optionenum.py b/src/buildstream/_options/optionenum.py
index 4a09413..d30f456 100644
--- a/src/buildstream/_options/optionenum.py
+++ b/src/buildstream/_options/optionenum.py
@@ -27,7 +27,7 @@ from .option import Option, OPTION_SYMBOLS
 #
 class OptionEnum(Option):
 
-    OPTION_TYPE = 'enum'
+    OPTION_TYPE = "enum"
 
     def __init__(self, name, definition, pool):
         self.values = None
@@ -39,17 +39,20 @@ class OptionEnum(Option):
     def load_special(self, node, allow_default_definition=True):
         super().load(node)
 
-        valid_symbols = OPTION_SYMBOLS + ['values']
+        valid_symbols = OPTION_SYMBOLS + ["values"]
         if allow_default_definition:
-            valid_symbols += ['default']
+            valid_symbols += ["default"]
 
         node.validate_keys(valid_symbols)
 
-        self.values = node.get_str_list('values', default=[])
+        self.values = node.get_str_list("values", default=[])
         if not self.values:
-            raise LoadError("{}: No values specified for {} option '{}'"
-                            .format(node.get_provenance(), self.OPTION_TYPE, self.name),
-                            LoadErrorReason.INVALID_DATA,)
+            raise LoadError(
+                "{}: No values specified for {} option '{}'".format(
+                    node.get_provenance(), self.OPTION_TYPE, self.name
+                ),
+                LoadErrorReason.INVALID_DATA,
+            )
 
         # Allow subclass to define the default value
         self.value = self.load_default_value(node)
@@ -77,13 +80,14 @@ class OptionEnum(Option):
                 prefix = "{}: ".format(provenance)
             else:
                 prefix = ""
-            raise LoadError("{}Invalid value for {} option '{}': {}\n"
-                            .format(prefix, self.OPTION_TYPE, self.name, value) +
-                            "Valid values: {}".format(", ".join(self.values)),
-                            LoadErrorReason.INVALID_DATA)
+            raise LoadError(
+                "{}Invalid value for {} option '{}': {}\n".format(prefix, self.OPTION_TYPE, self.name, value)
+                + "Valid values: {}".format(", ".join(self.values)),
+                LoadErrorReason.INVALID_DATA,
+            )
 
     def load_default_value(self, node):
-        value_node = node.get_scalar('default')
+        value_node = node.get_scalar("default")
         value = value_node.as_str()
         self.validate(value, value_node)
         return value
diff --git a/src/buildstream/_options/optionflags.py b/src/buildstream/_options/optionflags.py
index e5217a7..82ede56 100644
--- a/src/buildstream/_options/optionflags.py
+++ b/src/buildstream/_options/optionflags.py
@@ -27,7 +27,7 @@ from .option import Option, OPTION_SYMBOLS
 #
 class OptionFlags(Option):
 
-    OPTION_TYPE = 'flags'
+    OPTION_TYPE = "flags"
 
     def __init__(self, name, definition, pool):
         self.values = None
@@ -39,20 +39,23 @@ class OptionFlags(Option):
     def load_special(self, node, allow_value_definitions=True):
         super().load(node)
 
-        valid_symbols = OPTION_SYMBOLS + ['default']
+        valid_symbols = OPTION_SYMBOLS + ["default"]
         if allow_value_definitions:
-            valid_symbols += ['values']
+            valid_symbols += ["values"]
 
         node.validate_keys(valid_symbols)
 
         # Allow subclass to define the valid values
         self.values = self.load_valid_values(node)
         if not self.values:
-            raise LoadError("{}: No values specified for {} option '{}'"
-                            .format(node.get_provenance(), self.OPTION_TYPE, self.name),
-                            LoadErrorReason.INVALID_DATA)
-
-        value_node = node.get_sequence('default', default=[])
+            raise LoadError(
+                "{}: No values specified for {} option '{}'".format(
+                    node.get_provenance(), self.OPTION_TYPE, self.name
+                ),
+                LoadErrorReason.INVALID_DATA,
+            )
+
+        value_node = node.get_sequence("default", default=[])
         self.value = value_node.as_str_list()
         self.validate(self.value, value_node)
 
@@ -70,7 +73,7 @@ class OptionFlags(Option):
         stripped = "".join(value.split())
 
         # Get the comma separated values
-        list_value = stripped.split(',')
+        list_value = stripped.split(",")
 
         self.validate(list_value)
         self.value = sorted(list_value)
@@ -86,12 +89,13 @@ class OptionFlags(Option):
                     prefix = "{}: ".format(provenance)
                 else:
                     prefix = ""
-                raise LoadError("{}Invalid value for flags option '{}': {}\n"
-                                .format(prefix, self.name, value) +
-                                "Valid values: {}".format(", ".join(self.values)),
-                                LoadErrorReason.INVALID_DATA)
+                raise LoadError(
+                    "{}Invalid value for flags option '{}': {}\n".format(prefix, self.name, value)
+                    + "Valid values: {}".format(", ".join(self.values)),
+                    LoadErrorReason.INVALID_DATA,
+                )
 
     def load_valid_values(self, node):
         # Allow the more descriptive error to raise when no values
         # exist rather than bailing out here (by specifying default_value)
-        return node.get_str_list('values', default=[])
+        return node.get_str_list("values", default=[])
diff --git a/src/buildstream/_options/optionos.py b/src/buildstream/_options/optionos.py
index fcf4552..3f4e902 100644
--- a/src/buildstream/_options/optionos.py
+++ b/src/buildstream/_options/optionos.py
@@ -1,4 +1,3 @@
-
 #
 #  Copyright (C) 2017 Codethink Limited
 #
@@ -26,7 +25,7 @@ from .optionenum import OptionEnum
 #
 class OptionOS(OptionEnum):
 
-    OPTION_TYPE = 'os'
+    OPTION_TYPE = "os"
 
     def load(self, node):
         super().load_special(node, allow_default_definition=False)
diff --git a/src/buildstream/_options/optionpool.py b/src/buildstream/_options/optionpool.py
index a0730c6..f105bb1 100644
--- a/src/buildstream/_options/optionpool.py
+++ b/src/buildstream/_options/optionpool.py
@@ -50,8 +50,7 @@ class OptionTypes(FastEnum):
     OS = OptionOS.OPTION_TYPE
 
 
-class OptionPool():
-
+class OptionPool:
     def __init__(self, element_path):
         # We hold on to the element path for the sake of OptionEltMask
         self.element_path = element_path
@@ -59,7 +58,7 @@ class OptionPool():
         #
         # Private members
         #
-        self._options = {}      # The Options
+        self._options = {}  # The Options
         self._variables = None  # The Options resolved into typed variables
 
         self._environment = None
@@ -69,7 +68,7 @@ class OptionPool():
         state = self.__dict__.copy()
         # Jinja2 Environments don't appear to be serializable. It is easy
         # enough for us to reconstruct this one anyway, so no need to pickle it.
-        del state['_environment']
+        del state["_environment"]
         return state
 
     def __setstate__(self, state):
@@ -90,7 +89,7 @@ class OptionPool():
             # Assert that the option name is a valid symbol
             _assert_symbol_name(option_name, "option name", ref_node=option_definition, allow_dashes=False)
 
-            opt_type_name = option_definition.get_enum('type', OptionTypes)
+            opt_type_name = option_definition.get_enum("type", OptionTypes)
             opt_type = _OPTION_TYPES[opt_type_name.value]
 
             option = opt_type(option_name, option_definition, self)
@@ -110,8 +109,9 @@ class OptionPool():
                 option = self._options[option_name]
             except KeyError as e:
                 p = option_value.get_provenance()
-                raise LoadError("{}: Unknown option '{}' specified"
-                                .format(p, option_name), LoadErrorReason.INVALID_DATA) from e
+                raise LoadError(
+                    "{}: Unknown option '{}' specified".format(p, option_name), LoadErrorReason.INVALID_DATA
+                ) from e
             option.load_value(node, transform=transform)
 
     # load_cli_values()
@@ -129,8 +129,10 @@ class OptionPool():
                 option = self._options[option_name]
             except KeyError as e:
                 if not ignore_unknown:
-                    raise LoadError("Unknown option '{}' specified on the command line"
-                                    .format(option_name), LoadErrorReason.INVALID_DATA) from e
+                    raise LoadError(
+                        "Unknown option '{}' specified on the command line".format(option_name),
+                        LoadErrorReason.INVALID_DATA,
+                    ) from e
             else:
                 option.set_value(option_value)
 
@@ -239,11 +241,13 @@ class OptionPool():
             elif val == "False":
                 return False
             else:  # pragma: nocover
-                raise LoadError("Failed to evaluate expression: {}".format(expression),
-                                LoadErrorReason.EXPRESSION_FAILED)
+                raise LoadError(
+                    "Failed to evaluate expression: {}".format(expression), LoadErrorReason.EXPRESSION_FAILED
+                )
         except jinja2.exceptions.TemplateError as e:
-            raise LoadError("Failed to evaluate expression ({}): {}".format(expression, e),
-                            LoadErrorReason.EXPRESSION_FAILED)
+            raise LoadError(
+                "Failed to evaluate expression ({}): {}".format(expression, e), LoadErrorReason.EXPRESSION_FAILED
+            )
 
     # Recursion assistant for lists, in case there
     # are lists of lists.
@@ -262,25 +266,27 @@ class OptionPool():
     # Return true if a conditional was processed.
     #
     def _process_one_node(self, node):
-        conditions = node.get_sequence('(?)', default=None)
-        assertion = node.get_str('(!)', default=None)
+        conditions = node.get_sequence("(?)", default=None)
+        assertion = node.get_str("(!)", default=None)
 
         # Process assertions first, we want to abort on the first encountered
         # assertion in a given dictionary, and not lose an assertion due to
         # it being overwritten by a later assertion which might also trigger.
         if assertion is not None:
-            p = node.get_scalar('(!)').get_provenance()
+            p = node.get_scalar("(!)").get_provenance()
             raise LoadError("{}: {}".format(p, assertion.strip()), LoadErrorReason.USER_ASSERTION)
 
         if conditions is not None:
-            del node['(?)']
+            del node["(?)"]
 
             for condition in conditions:
                 tuples = list(condition.items())
                 if len(tuples) > 1:
                     provenance = condition.get_provenance()
-                    raise LoadError("{}: Conditional statement has more than one key".format(provenance),
-                                    LoadErrorReason.INVALID_DATA)
+                    raise LoadError(
+                        "{}: Conditional statement has more than one key".format(provenance),
+                        LoadErrorReason.INVALID_DATA,
+                    )
 
                 expression, value = tuples[0]
                 try:
@@ -292,8 +298,10 @@ class OptionPool():
 
                 if type(value) is not MappingNode:  # pylint: disable=unidiomatic-typecheck
                     provenance = condition.get_provenance()
-                    raise LoadError("{}: Only values of type 'dict' can be composed.".format(provenance),
-                                    LoadErrorReason.ILLEGAL_COMPOSITE)
+                    raise LoadError(
+                        "{}: Only values of type 'dict' can be composed.".format(provenance),
+                        LoadErrorReason.ILLEGAL_COMPOSITE,
+                    )
 
                 # Apply the yaml fragment if its condition evaluates to true
                 if apply_fragment:
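
The checks reformatted above enforce the shape of a "(?)" conditional: each list entry must be a single-key mapping whose value is itself a mapping. A plain-dict sketch of the same validation (illustrative data; the real code operates on BuildStream node types):

    condition = {"arch == 'aarch64'": {"variables": {"flags": "-mcpu=cortex-a53"}}}

    tuples = list(condition.items())
    if len(tuples) > 1:
        raise ValueError("Conditional statement has more than one key")

    expression, value = tuples[0]
    if not isinstance(value, dict):
        raise ValueError("Only values of type 'dict' can be composed.")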
diff --git a/src/buildstream/_pipeline.py b/src/buildstream/_pipeline.py
index b9efc78..0b9ab5f 100644
--- a/src/buildstream/_pipeline.py
+++ b/src/buildstream/_pipeline.py
@@ -40,27 +40,27 @@ from ._project import ProjectRefStorage
 #
 # These values correspond to the CLI `--deps` arguments for convenience.
 #
-class PipelineSelection():
+class PipelineSelection:
 
     # Select only the target elements in the associated targets
-    NONE = 'none'
+    NONE = "none"
 
     # As NONE, but redirect elements that are capable of it
-    REDIRECT = 'redirect'
+    REDIRECT = "redirect"
 
     # Select elements which must be built for the associated targets to be built
-    PLAN = 'plan'
+    PLAN = "plan"
 
     # All dependencies of all targets, including the targets
-    ALL = 'all'
+    ALL = "all"
 
     # All direct build dependencies and their recursive runtime dependencies,
     # excluding the targets
-    BUILD = 'build'
+    BUILD = "build"
 
     # All direct runtime dependencies and their recursive runtime dependencies,
     # including the targets
-    RUN = 'run'
+    RUN = "run"
 
 
 # Pipeline()
@@ -70,12 +70,11 @@ class PipelineSelection():
 #    context (Context): The Context object
 #    artifacts (Context): The ArtifactCache object
 #
-class Pipeline():
-
+class Pipeline:
     def __init__(self, context, project, artifacts):
 
-        self._context = context     # The Context
-        self._project = project     # The toplevel project
+        self._context = context  # The Context
+        self._project = project  # The toplevel project
 
         #
         # Private members
@@ -108,10 +107,7 @@ class Pipeline():
 
             # Now create element groups to match the input target groups
             elt_iter = iter(elements)
-            element_groups = [
-                [next(elt_iter) for i in range(len(group))]
-                for group in target_groups
-            ]
+            element_groups = [[next(elt_iter) for i in range(len(group))] for group in target_groups]
 
             return tuple(element_groups)
 
@@ -240,8 +236,7 @@ class Pipeline():
             for t in targets:
                 new_elm = t._get_source_element()
                 if new_elm != t and not silent:
-                    self._message(MessageType.INFO, "Element '{}' redirected to '{}'"
-                                  .format(t.name, new_elm.name))
+                    self._message(MessageType.INFO, "Element '{}' redirected to '{}'".format(t.name, new_elm.name))
                 if new_elm not in elements:
                     elements.append(new_elm)
         elif mode == PipelineSelection.PLAN:
@@ -296,9 +291,7 @@ class Pipeline():
         # Build a list of 'intersection' elements, i.e. the set of
         # elements that lie on the border closest to excepted elements
         # between excepted and target elements.
-        intersection = list(itertools.chain.from_iterable(
-            find_intersection(element) for element in except_targets
-        ))
+        intersection = list(itertools.chain.from_iterable(find_intersection(element) for element in except_targets))
 
         # Now use this set of elements to traverse the targeted
         # elements, except 'intersection' elements and their unique
@@ -354,10 +347,7 @@ class Pipeline():
     #
     def subtract_elements(self, elements, subtract):
         subtract_set = set(subtract)
-        return [
-            e for e in elements
-            if e not in subtract_set
-        ]
+        return [e for e in elements if e not in subtract_set]
 
     # add_elements()
     #
@@ -426,14 +416,13 @@ class Pipeline():
                 for source in element.sources():
                     if source._get_consistency() == Consistency.INCONSISTENT:
                         detail += "    {} is missing ref\n".format(source)
-                detail += '\n'
+                detail += "\n"
             detail += "Try tracking these elements first with `bst source track`\n"
 
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
 
         if inconsistent_workspaced:
-            detail = "Some workspaces exist but are not closed\n" + \
-                     "Try closing them with `bst workspace close`\n\n"
+            detail = "Some workspaces exist but are not closed\n" + "Try closing them with `bst workspace close`\n\n"
             for element in inconsistent_workspaced:
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline-workspaced")
@@ -449,8 +438,7 @@ class Pipeline():
         uncached = []
         with self._context.messenger.timed_activity("Checking sources"):
             for element in elements:
-                if element._get_consistency() < Consistency.CACHED and \
-                        not element._source_cached():
+                if element._get_consistency() < Consistency.CACHED and not element._source_cached():
                     uncached.append(element)
 
         if uncached:
@@ -460,9 +448,11 @@ class Pipeline():
                 for source in element.sources():
                     if source._get_consistency() < Consistency.CACHED:
                         detail += "    {}\n".format(source)
-                detail += '\n'
-            detail += "Try fetching these elements first with `bst source fetch`,\n" + \
-                      "or run this command with the `--fetch` option\n"
+                detail += "\n"
+            detail += (
+                "Try fetching these elements first with `bst source fetch`,\n"
+                + "or run this command with the `--fetch` option\n"
+            )
 
             raise PipelineError("Uncached sources", detail=detail, reason="uncached-sources")
 
@@ -483,10 +473,7 @@ class Pipeline():
     #            not contain any cross junction elements.
     #
     def _filter_cross_junctions(self, project, elements):
-        return [
-            element for element in elements
-            if element._get_project() is project
-        ]
+        return [element for element in elements if element._get_project() is project]
 
     # _assert_junction_tracking()
     #
@@ -511,8 +498,10 @@ class Pipeline():
         for element in elements:
             element_project = element._get_project()
             if element_project is not self._project:
-                detail = "Requested to track sources across junction boundaries\n" + \
-                         "in a project which does not use project.refs ref-storage."
+                detail = (
+                    "Requested to track sources across junction boundaries\n"
+                    + "in a project which does not use project.refs ref-storage."
+                )
 
                 raise PipelineError("Untrackable sources", detail=detail, reason="untrackable-sources")
 
@@ -522,8 +511,7 @@ class Pipeline():
     #
     def _message(self, message_type, message, **kwargs):
         args = dict(kwargs)
-        self._context.messenger.message(
-            Message(message_type, message, **args))
+        self._context.messenger.message(Message(message_type, message, **args))
 
 
 # _Planner()
@@ -533,7 +521,7 @@ class Pipeline():
 # parts need to be built depending on build only dependencies
 # being cached, and depth sorting for more efficient processing.
 #
-class _Planner():
+class _Planner:
     def __init__(self):
         self.depth_map = OrderedDict()
         self.visiting_elements = set()
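
As context for the hunk above, _Planner assigns each element the deepest depth at which it is reached and then processes the deepest elements first. A self-contained sketch of that depth-sorting idea, assuming an acyclic graph given as a simple name-to-list mapping:

    from collections import OrderedDict

    def plan(targets, dependencies):
        depth_map = OrderedDict()

        def visit(element, depth):
            # Keep only the deepest depth seen for each element.
            if depth_map.get(element, -1) >= depth:
                return
            depth_map[element] = depth
            for dep in dependencies.get(element, []):
                visit(dep, depth + 1)

        for target in targets:
            visit(target, 0)

        # Deepest dependencies come first.
        return sorted(depth_map, key=depth_map.get, reverse=True)

    print(plan(["app"], {"app": ["lib"], "lib": ["base"]}))  # ['base', 'lib', 'app']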
diff --git a/src/buildstream/_platform/darwin.py b/src/buildstream/_platform/darwin.py
index f235353..06491e8 100644
--- a/src/buildstream/_platform/darwin.py
+++ b/src/buildstream/_platform/darwin.py
@@ -59,9 +59,9 @@ class Darwin(Platform):
 
     @staticmethod
     def _create_dummy_sandbox(*args, **kwargs):
-        kwargs['dummy_reason'] = \
-            "OSXFUSE is not supported and there are no supported sandbox " + \
-            "technologies for MacOS at this time"
+        kwargs["dummy_reason"] = (
+            "OSXFUSE is not supported and there are no supported sandbox " + "technologies for MacOS at this time"
+        )
         return SandboxDummy(*args, **kwargs)
 
     def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_platform/fallback.py b/src/buildstream/_platform/fallback.py
index 4f7ff80..b9e9f52 100644
--- a/src/buildstream/_platform/fallback.py
+++ b/src/buildstream/_platform/fallback.py
@@ -20,15 +20,15 @@ from .platform import Platform
 
 
 class Fallback(Platform):
-
     def _check_dummy_sandbox_config(self, config):
         return True
 
     def _create_dummy_sandbox(self, *args, **kwargs):
-        kwargs['dummy_reason'] = \
-            ("Fallback platform only implements dummy sandbox, "
-             "BuildStream may be having issues correctly detecting your platform, "
-             "platform can be forced with BST_FORCE_BACKEND")
+        kwargs["dummy_reason"] = (
+            "Fallback platform only implements dummy sandbox, "
+            "BuildStream may be having issues correctly detecting your platform, "
+            "platform can be forced with BST_FORCE_BACKEND"
+        )
         return SandboxDummy(*args, **kwargs)
 
     def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_platform/linux.py b/src/buildstream/_platform/linux.py
index b400bfa..bdc2e0d 100644
--- a/src/buildstream/_platform/linux.py
+++ b/src/buildstream/_platform/linux.py
@@ -28,17 +28,16 @@ from .._exceptions import PlatformError
 
 
 class Linux(Platform):
-
     def _setup_sandbox(self, force_sandbox):
         sandbox_setups = {
-            'bwrap': self._setup_bwrap_sandbox,
-            'buildbox': self._setup_buildbox_sandbox,
-            'chroot': self._setup_chroot_sandbox,
-            'dummy': self._setup_dummy_sandbox,
+            "bwrap": self._setup_bwrap_sandbox,
+            "buildbox": self._setup_buildbox_sandbox,
+            "chroot": self._setup_chroot_sandbox,
+            "dummy": self._setup_dummy_sandbox,
         }
 
         preferred_sandboxes = [
-            'bwrap',
+            "bwrap",
         ]
 
         self._try_sandboxes(force_sandbox, sandbox_setups, preferred_sandboxes)
@@ -54,11 +53,12 @@ class Linux(Platform):
 
     def can_crossbuild(self, config):
         host_arch = self.get_host_arch()
-        if ((config.build_arch == "x86-32" and host_arch == "x86-64") or
-                (config.build_arch == "aarch32" and host_arch == "aarch64")):
+        if (config.build_arch == "x86-32" and host_arch == "x86-64") or (
+            config.build_arch == "aarch32" and host_arch == "aarch64"
+        ):
             if self.linux32 is None:
                 try:
-                    utils.get_host_tool('linux32')
+                    utils.get_host_tool("linux32")
                     self.linux32 = True
                 except utils.ProgramNotFoundError:
                     self.linux32 = False
@@ -76,7 +76,7 @@ class Linux(Platform):
 
     def _create_dummy_sandbox(self, *args, **kwargs):
         dummy_reasons = " and ".join(self.dummy_reasons)
-        kwargs['dummy_reason'] = dummy_reasons
+        kwargs["dummy_reason"] = dummy_reasons
         return SandboxDummy(*args, **kwargs)
 
     def _setup_dummy_sandbox(self):
@@ -87,11 +87,13 @@ class Linux(Platform):
     # Bubble-wrap sandbox methods
     def _check_sandbox_config_bwrap(self, config):
         from ..sandbox._sandboxbwrap import SandboxBwrap
+
         return SandboxBwrap.check_sandbox_config(self, config)
 
     def _create_bwrap_sandbox(self, *args, **kwargs):
         from ..sandbox._sandboxbwrap import SandboxBwrap
-        kwargs['linux32'] = self.linux32
+
+        kwargs["linux32"] = self.linux32
         return SandboxBwrap(*args, **kwargs)
 
     def _setup_bwrap_sandbox(self):
@@ -110,15 +112,18 @@ class Linux(Platform):
     # Chroot sandbox methods
     def _check_sandbox_config_chroot(self, config):
         from ..sandbox._sandboxchroot import SandboxChroot
+
         return SandboxChroot.check_sandbox_config(self, config)
 
     @staticmethod
     def _create_chroot_sandbox(*args, **kwargs):
         from ..sandbox._sandboxchroot import SandboxChroot
+
         return SandboxChroot(*args, **kwargs)
 
     def _setup_chroot_sandbox(self):
         from ..sandbox._sandboxchroot import SandboxChroot
+
         self._check_sandbox(SandboxChroot)
         self.check_sandbox_config = self._check_sandbox_config_chroot
         self.create_sandbox = Linux._create_chroot_sandbox
@@ -127,18 +132,23 @@ class Linux(Platform):
     # Buildbox sandbox methods
     def _check_sandbox_config_buildbox(self, config):
         from ..sandbox._sandboxbuildbox import SandboxBuildBox
+
         return SandboxBuildBox.check_sandbox_config(self, config)
 
     @staticmethod
     def _create_buildbox_sandbox(*args, **kwargs):
         from ..sandbox._sandboxbuildbox import SandboxBuildBox
-        if kwargs.get('allow_real_directory'):
-            raise PlatformError("The BuildBox Sandbox does not support real directories.",
-                                reason="You are using BuildBox sandbox because BST_FORCE_SANDBOX=buildbox")
+
+        if kwargs.get("allow_real_directory"):
+            raise PlatformError(
+                "The BuildBox Sandbox does not support real directories.",
+                reason="You are using BuildBox sandbox because BST_FORCE_SANDBOX=buildbox",
+            )
         return SandboxBuildBox(*args, **kwargs)
 
     def _setup_buildbox_sandbox(self):
         from ..sandbox._sandboxbuildbox import SandboxBuildBox
+
         self._check_sandbox(SandboxBuildBox)
         self.check_sandbox_config = self._check_sandbox_config_buildbox
         self.create_sandbox = self._create_buildbox_sandbox
diff --git a/src/buildstream/_platform/platform.py b/src/buildstream/_platform/platform.py
index af49b9e..1fddbe8 100644
--- a/src/buildstream/_platform/platform.py
+++ b/src/buildstream/_platform/platform.py
@@ -29,7 +29,7 @@ from .._exceptions import PlatformError, ImplError, SandboxError
 from .. import utils
 
 
-class Platform():
+class Platform:
     # Platform()
     #
     # A class to manage platform-specific details. Currently holds the
@@ -45,7 +45,7 @@ class Platform():
         self._setup_sandbox(force_sandbox)
 
     def _setup_sandbox(self, force_sandbox):
-        sandbox_setups = {'dummy': self._setup_dummy_sandbox}
+        sandbox_setups = {"dummy": self._setup_dummy_sandbox}
         preferred_sandboxes = []
         self._try_sandboxes(force_sandbox, sandbox_setups, preferred_sandboxes)
 
@@ -58,12 +58,16 @@ class Platform():
             try:
                 sandbox_setups[force_sandbox]()
             except KeyError:
-                raise PlatformError("Forced Sandbox is unavailable on this platform: BST_FORCE_SANDBOX"
-                                    " is set to {} but it is not available".format(force_sandbox))
+                raise PlatformError(
+                    "Forced Sandbox is unavailable on this platform: BST_FORCE_SANDBOX"
+                    " is set to {} but it is not available".format(force_sandbox)
+                )
             except SandboxError as Error:
-                raise PlatformError("Forced Sandbox Error: BST_FORCE_SANDBOX"
-                                    " is set to {} but cannot be setup".format(force_sandbox),
-                                    detail=" and ".join(self.dummy_reasons)) from Error
+                raise PlatformError(
+                    "Forced Sandbox Error: BST_FORCE_SANDBOX"
+                    " is set to {} but cannot be setup".format(force_sandbox),
+                    detail=" and ".join(self.dummy_reasons),
+                ) from Error
         else:
             for good_sandbox in preferred_sandboxes:
                 try:
@@ -73,7 +77,7 @@ class Platform():
                     continue
                 except utils.ProgramNotFoundError:
                     continue
-            sandbox_setups['dummy']()
+            sandbox_setups["dummy"]()
 
     def _check_sandbox(self, Sandbox):
         try:
@@ -87,29 +91,29 @@ class Platform():
         # Meant for testing purposes and therefore hidden in the
         # deepest corners of the source code. Try not to abuse this,
         # please?
-        if os.getenv('BST_FORCE_SANDBOX'):
-            force_sandbox = os.getenv('BST_FORCE_SANDBOX')
+        if os.getenv("BST_FORCE_SANDBOX"):
+            force_sandbox = os.getenv("BST_FORCE_SANDBOX")
         else:
             force_sandbox = None
 
-        if os.getenv('BST_FORCE_BACKEND'):
-            backend = os.getenv('BST_FORCE_BACKEND')
-        elif sys.platform.startswith('darwin'):
-            backend = 'darwin'
-        elif sys.platform.startswith('linux'):
-            backend = 'linux'
-        elif sys.platform == 'win32':
-            backend = 'win32'
+        if os.getenv("BST_FORCE_BACKEND"):
+            backend = os.getenv("BST_FORCE_BACKEND")
+        elif sys.platform.startswith("darwin"):
+            backend = "darwin"
+        elif sys.platform.startswith("linux"):
+            backend = "linux"
+        elif sys.platform == "win32":
+            backend = "win32"
         else:
-            backend = 'fallback'
+            backend = "fallback"
 
-        if backend == 'linux':
+        if backend == "linux":
             from .linux import Linux as PlatformImpl  # pylint: disable=cyclic-import
-        elif backend == 'darwin':
+        elif backend == "darwin":
             from .darwin import Darwin as PlatformImpl  # pylint: disable=cyclic-import
-        elif backend == 'win32':
+        elif backend == "win32":
             from .win32 import Win32 as PlatformImpl  # pylint: disable=cyclic-import
-        elif backend == 'fallback':
+        elif backend == "fallback":
             from .fallback import Fallback as PlatformImpl  # pylint: disable=cyclic-import
         else:
             raise PlatformError("No such platform: '{}'".format(backend))
@@ -156,11 +160,11 @@ class Platform():
             "sparc64": "sparc-v9",
             "sparc-v9": "sparc-v9",
             "x86-32": "x86-32",
-            "x86-64": "x86-64"
+            "x86-64": "x86-64",
         }
 
         try:
-            return aliases[arch.replace('_', '-').lower()]
+            return aliases[arch.replace("_", "-").lower()]
         except KeyError:
             raise PlatformError("Unknown architecture: {}".format(arch))
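
The alias table in this hunk normalizes architecture spellings before lookup. A trimmed sketch using only the entries visible above:

    ALIASES = {
        "sparc64": "sparc-v9",
        "sparc-v9": "sparc-v9",
        "x86-32": "x86-32",
        "x86-64": "x86-64",
    }

    def canonicalize(arch):
        # Lower-case and swap underscores for dashes, then look up the alias.
        try:
            return ALIASES[arch.replace("_", "-").lower()]
        except KeyError:
            raise ValueError("Unknown architecture: {}".format(arch))

    print(canonicalize("X86_64"))  # x86-64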
 
@@ -188,7 +192,7 @@ class Platform():
     def does_multiprocessing_start_require_pickling(self):
         # Note that if the start method has not been set before now, it will be
         # set to the platform default by `get_start_method`.
-        return multiprocessing.get_start_method() != 'fork'
+        return multiprocessing.get_start_method() != "fork"
 
     ##################################################################
     #                        Sandbox functions                       #
@@ -206,12 +210,12 @@ class Platform():
     #     (Sandbox) A sandbox
     #
     def create_sandbox(self, *args, **kwargs):
-        raise ImplError("Platform {platform} does not implement create_sandbox()"
-                        .format(platform=type(self).__name__))
+        raise ImplError("Platform {platform} does not implement create_sandbox()".format(platform=type(self).__name__))
 
     def check_sandbox_config(self, config):
-        raise ImplError("Platform {platform} does not implement check_sandbox_config()"
-                        .format(platform=type(self).__name__))
+        raise ImplError(
+            "Platform {platform} does not implement check_sandbox_config()".format(platform=type(self).__name__)
+        )
 
     def maximize_open_file_limit(self):
         # Need to set resources for _frontend/app.py as this is dependent on the platform
@@ -230,5 +234,6 @@ class Platform():
             resource.setrlimit(resource.RLIMIT_NOFILE, (hard_limit, hard_limit))
 
     def _setup_dummy_sandbox(self):
-        raise ImplError("Platform {platform} does not implement _setup_dummy_sandbox()"
-                        .format(platform=type(self).__name__))
+        raise ImplError(
+            "Platform {platform} does not implement _setup_dummy_sandbox()".format(platform=type(self).__name__)
+        )
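
The backend selection reformatted above consults BST_FORCE_BACKEND first and falls back to sys.platform. The same decision logic as a standalone sketch:

    import os
    import sys

    def choose_backend():
        forced = os.getenv("BST_FORCE_BACKEND")
        if forced:
            return forced
        if sys.platform.startswith("darwin"):
            return "darwin"
        if sys.platform.startswith("linux"):
            return "linux"
        if sys.platform == "win32":
            return "win32"
        return "fallback"

    print(choose_backend())  # e.g. "linux"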
diff --git a/src/buildstream/_platform/win32.py b/src/buildstream/_platform/win32.py
index 3668001..a2529d8 100644
--- a/src/buildstream/_platform/win32.py
+++ b/src/buildstream/_platform/win32.py
@@ -20,7 +20,6 @@ from .platform import Platform
 
 
 class Win32(Platform):
-
     def maximize_open_file_limit(self):
         # Note that on Windows, we don't have the 'resource' module to help us
         # configure open file limits.
@@ -50,7 +49,7 @@ class Win32(Platform):
 
     @staticmethod
     def _create_dummy_sandbox(*args, **kwargs):
-        kwargs['dummy_reason'] = "There are no supported sandbox technologies for Win32 at this time."
+        kwargs["dummy_reason"] = "There are no supported sandbox technologies for Win32 at this time."
         return SandboxDummy(*args, **kwargs)
 
     def _setup_dummy_sandbox(self):
diff --git a/src/buildstream/_plugincontext.py b/src/buildstream/_plugincontext.py
index b07c2b3..54839e1 100644
--- a/src/buildstream/_plugincontext.py
+++ b/src/buildstream/_plugincontext.py
@@ -41,10 +41,8 @@ from . import utils
 # a given BuildStream project are isolated to their respective
 # Pipelines.
 #
-class PluginContext():
-
-    def __init__(self, plugin_base, base_type, site_plugin_path, *,
-                 plugin_origins=None, format_versions={}):
+class PluginContext:
+    def __init__(self, plugin_base, base_type, site_plugin_path, *, plugin_origins=None, format_versions={}):
 
         # For pickling across processes, make sure this context has a unique
         # identifier, which we prepend to the identifier of each PluginSource.
@@ -59,7 +57,7 @@ class PluginContext():
         # Private members
         #
         self._base_type = base_type  # The base class plugins derive from
-        self._types = {}             # Plugin type lookup table by kind
+        self._types = {}  # Plugin type lookup table by kind
         self._plugin_origins = plugin_origins or []
 
         # The PluginSource object
@@ -72,8 +70,7 @@ class PluginContext():
 
     def _init_site_source(self):
         self._site_source = self._plugin_base.make_plugin_source(
-            searchpath=self._site_plugin_path,
-            identifier=self._identifier + 'site',
+            searchpath=self._site_plugin_path, identifier=self._identifier + "site",
         )
 
     def __getstate__(self):
@@ -93,11 +90,11 @@ class PluginContext():
         # this by making sure we are not creating new members, only clearing
         # existing ones.
         #
-        del state['_site_source']
-        assert '_types' in state
-        state['_types'] = {}
-        assert '_alternate_sources' in state
-        state['_alternate_sources'] = {}
+        del state["_site_source"]
+        assert "_types" in state
+        state["_types"] = {}
+        assert "_alternate_sources" in state
+        state["_alternate_sources"] = {}
 
         return state
 
@@ -133,60 +130,51 @@ class PluginContext():
         return self._types.values()
 
     def _get_local_plugin_source(self, path):
-        if ('local', path) not in self._alternate_sources:
+        if ("local", path) not in self._alternate_sources:
             # key by a tuple to avoid collision
-            source = self._plugin_base.make_plugin_source(
-                searchpath=[path],
-                identifier=self._identifier + path,
-            )
+            source = self._plugin_base.make_plugin_source(searchpath=[path], identifier=self._identifier + path,)
             # Ensure that sources never get garbage collected,
             # as they'll take the plugins with them.
-            self._alternate_sources[('local', path)] = source
+            self._alternate_sources[("local", path)] = source
         else:
-            source = self._alternate_sources[('local', path)]
+            source = self._alternate_sources[("local", path)]
         return source
 
     def _get_pip_plugin_source(self, package_name, kind):
         defaults = None
-        if ('pip', package_name) not in self._alternate_sources:
+        if ("pip", package_name) not in self._alternate_sources:
             import pkg_resources
+
             # key by a tuple to avoid collision
             try:
-                package = pkg_resources.get_entry_info(package_name,
-                                                       'buildstream.plugins',
-                                                       kind)
+                package = pkg_resources.get_entry_info(package_name, "buildstream.plugins", kind)
             except pkg_resources.DistributionNotFound as e:
-                raise PluginError("Failed to load {} plugin '{}': {}"
-                                  .format(self._base_type.__name__, kind, e)) from e
+                raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
 
             if package is None:
-                raise PluginError("Pip package {} does not contain a plugin named '{}'"
-                                  .format(package_name, kind))
+                raise PluginError("Pip package {} does not contain a plugin named '{}'".format(package_name, kind))
 
             location = package.dist.get_resource_filename(
-                pkg_resources._manager,
-                package.module_name.replace('.', os.sep) + '.py'
+                pkg_resources._manager, package.module_name.replace(".", os.sep) + ".py"
             )
 
             # Also load the defaults - required since setuptools
             # may need to extract the file.
             try:
                 defaults = package.dist.get_resource_filename(
-                    pkg_resources._manager,
-                    package.module_name.replace('.', os.sep) + '.yaml'
+                    pkg_resources._manager, package.module_name.replace(".", os.sep) + ".yaml"
                 )
             except KeyError:
                 # The plugin didn't have an accompanying YAML file
                 defaults = None
 
             source = self._plugin_base.make_plugin_source(
-                searchpath=[os.path.dirname(location)],
-                identifier=self._identifier + os.path.dirname(location),
+                searchpath=[os.path.dirname(location)], identifier=self._identifier + os.path.dirname(location),
             )
-            self._alternate_sources[('pip', package_name)] = source
+            self._alternate_sources[("pip", package_name)] = source
 
         else:
-            source = self._alternate_sources[('pip', package_name)]
+            source = self._alternate_sources[("pip", package_name)]
 
         return source, defaults
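
The pip origin above resolves plugins through setuptools entry points in the "buildstream.plugins" group. A hedged sketch of that lookup; the package and kind names below are hypothetical:

    import pkg_resources

    package_name, kind = "example-bst-plugins", "mykind"

    # Raises pkg_resources.DistributionNotFound if the package is not installed.
    entry = pkg_resources.get_entry_info(package_name, "buildstream.plugins", kind)
    if entry is None:
        raise RuntimeError(
            "Pip package {} does not contain a plugin named '{}'".format(package_name, kind)
        )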
 
@@ -199,27 +187,27 @@ class PluginContext():
             loaded_dependency = False
 
             for origin in self._plugin_origins:
-                if kind not in origin.get_str_list('plugins'):
+                if kind not in origin.get_str_list("plugins"):
                     continue
 
-                if origin.get_str('origin') == 'local':
-                    local_path = origin.get_str('path')
+                if origin.get_str("origin") == "local":
+                    local_path = origin.get_str("path")
                     source = self._get_local_plugin_source(local_path)
-                elif origin.get_str('origin') == 'pip':
-                    package_name = origin.get_str('package-name')
+                elif origin.get_str("origin") == "pip":
+                    package_name = origin.get_str("package-name")
                     source, defaults = self._get_pip_plugin_source(package_name, kind)
                 else:
-                    raise PluginError("Failed to load plugin '{}': "
-                                      "Unexpected plugin origin '{}'"
-                                      .format(kind, origin.get_str('origin')))
+                    raise PluginError(
+                        "Failed to load plugin '{}': "
+                        "Unexpected plugin origin '{}'".format(kind, origin.get_str("origin"))
+                    )
                 loaded_dependency = True
                 break
 
             # Fall back to getting the source from site
             if not source:
                 if kind not in self._site_source.list_plugins():
-                    raise PluginError("No {} type registered for kind '{}'"
-                                      .format(self._base_type.__name__, kind))
+                    raise PluginError("No {} type registered for kind '{}'".format(self._base_type.__name__, kind))
 
                 source = self._site_source
 
@@ -241,17 +229,18 @@ class PluginContext():
                 defaults = os.path.join(plugin_dir, plugin_conf_name)
 
         except ImportError as e:
-            raise PluginError("Failed to load {} plugin '{}': {}"
-                              .format(self._base_type.__name__, kind, e)) from e
+            raise PluginError("Failed to load {} plugin '{}': {}".format(self._base_type.__name__, kind, e)) from e
 
         try:
             plugin_type = plugin.setup()
         except AttributeError as e:
-            raise PluginError("{} plugin '{}' did not provide a setup() function"
-                              .format(self._base_type.__name__, kind)) from e
+            raise PluginError(
+                "{} plugin '{}' did not provide a setup() function".format(self._base_type.__name__, kind)
+            ) from e
         except TypeError as e:
-            raise PluginError("setup symbol in {} plugin '{}' is not a function"
-                              .format(self._base_type.__name__, kind)) from e
+            raise PluginError(
+                "setup symbol in {} plugin '{}' is not a function".format(self._base_type.__name__, kind)
+            ) from e
 
         self._assert_plugin(kind, plugin_type)
         self._assert_version(kind, plugin_type)
@@ -259,19 +248,23 @@ class PluginContext():
 
     def _assert_plugin(self, kind, plugin_type):
         if kind in self._types:
-            raise PluginError("Tried to register {} plugin for existing kind '{}' "
-                              "(already registered {})"
-                              .format(self._base_type.__name__, kind, self._types[kind].__name__))
+            raise PluginError(
+                "Tried to register {} plugin for existing kind '{}' "
+                "(already registered {})".format(self._base_type.__name__, kind, self._types[kind].__name__)
+            )
         try:
             if not issubclass(plugin_type, self._base_type):
-                raise PluginError("{} plugin '{}' returned type '{}', which is not a subclass of {}"
-                                  .format(self._base_type.__name__, kind,
-                                          plugin_type.__name__,
-                                          self._base_type.__name__))
+                raise PluginError(
+                    "{} plugin '{}' returned type '{}', which is not a subclass of {}".format(
+                        self._base_type.__name__, kind, plugin_type.__name__, self._base_type.__name__
+                    )
+                )
         except TypeError as e:
-            raise PluginError("{} plugin '{}' returned something that is not a type (expected subclass of {})"
-                              .format(self._base_type.__name__, kind,
-                                      self._base_type.__name__)) from e
+            raise PluginError(
+                "{} plugin '{}' returned something that is not a type (expected subclass of {})".format(
+                    self._base_type.__name__, kind, self._base_type.__name__
+                )
+            ) from e
 
     def _assert_version(self, kind, plugin_type):
 
@@ -282,12 +275,16 @@ class PluginContext():
         req_minor = plugin_type.BST_REQUIRED_VERSION_MINOR
 
         if (bst_major, bst_minor) < (req_major, req_minor):
-            raise PluginError("BuildStream {}.{} is too old for {} plugin '{}' (requires {}.{})"
-                              .format(
-                                  bst_major, bst_minor,
-                                  self._base_type.__name__, kind,
-                                  plugin_type.BST_REQUIRED_VERSION_MAJOR,
-                                  plugin_type.BST_REQUIRED_VERSION_MINOR))
+            raise PluginError(
+                "BuildStream {}.{} is too old for {} plugin '{}' (requires {}.{})".format(
+                    bst_major,
+                    bst_minor,
+                    self._base_type.__name__,
+                    kind,
+                    plugin_type.BST_REQUIRED_VERSION_MAJOR,
+                    plugin_type.BST_REQUIRED_VERSION_MINOR,
+                )
+            )
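
The version gate above works because Python compares (major, minor) tuples lexicographically:

    bst_version = (1, 4)   # illustrative values
    required = (1, 93)
    if bst_version < required:
        print("BuildStream {}.{} is too old (requires {}.{})".format(*bst_version, *required))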
 
     # _assert_plugin_format()
     #
@@ -296,6 +293,9 @@ class PluginContext():
     #
     def _assert_plugin_format(self, plugin, version):
         if plugin.BST_FORMAT_VERSION < version:
-            raise LoadError("{}: Format version {} is too old for requested version {}"
-                            .format(plugin, plugin.BST_FORMAT_VERSION, version),
-                            LoadErrorReason.UNSUPPORTED_PLUGIN)
+            raise LoadError(
+                "{}: Format version {} is too old for requested version {}".format(
+                    plugin, plugin.BST_FORMAT_VERSION, version
+                ),
+                LoadErrorReason.UNSUPPORTED_PLUGIN,
+            )
diff --git a/src/buildstream/_profile.py b/src/buildstream/_profile.py
index b17215d..854c26e 100644
--- a/src/buildstream/_profile.py
+++ b/src/buildstream/_profile.py
@@ -39,15 +39,15 @@ import time
 #   BST_PROFILE=circ-dep-check:sort-deps bst <command> <args>
 #
 # The special 'all' value will enable all profiles.
-class Topics():
-    CIRCULAR_CHECK = 'circ-dep-check'
-    SORT_DEPENDENCIES = 'sort-deps'
-    LOAD_CONTEXT = 'load-context'
-    LOAD_PROJECT = 'load-project'
-    LOAD_PIPELINE = 'load-pipeline'
-    LOAD_SELECTION = 'load-selection'
-    SCHEDULER = 'scheduler'
-    ALL = 'all'
+class Topics:
+    CIRCULAR_CHECK = "circ-dep-check"
+    SORT_DEPENDENCIES = "sort-deps"
+    LOAD_CONTEXT = "load-context"
+    LOAD_PROJECT = "load-project"
+    LOAD_PIPELINE = "load-pipeline"
+    LOAD_SELECTION = "load-selection"
+    SCHEDULER = "scheduler"
+    ALL = "all"
 
 
 class _Profile:
@@ -63,8 +63,8 @@ class _Profile:
             os.getcwd(),
             "profile-{}-{}".format(
                 datetime.datetime.fromtimestamp(self.start_time).strftime("%Y%m%dT%H%M%S"),
-                self.key.replace("/", "-").replace(".", "-")
-            )
+                self.key.replace("/", "-").replace(".", "-"),
+            ),
         )
         self.log_filename = "{}.log".format(filename_template)
         self.cprofile_filename = "{}.cprofile".format(filename_template)
@@ -86,14 +86,16 @@ class _Profile:
         self.profiler.disable()
 
     def save(self):
-        heading = "\n".join([
-            "-" * 64,
-            "Profile for key: {}".format(self.key),
-            "Started at: {}".format(self.start_time),
-            "\n\t{}".format(self.message) if self.message else "",
-            "-" * 64,
-            ""  # for a final new line
-        ])
+        heading = "\n".join(
+            [
+                "-" * 64,
+                "Profile for key: {}".format(self.key),
+                "Started at: {}".format(self.start_time),
+                "\n\t{}".format(self.message) if self.message else "",
+                "-" * 64,
+                "",  # for a final new line
+            ]
+        )
 
         with open(self.log_filename, "a") as fp:
             stats = pstats.Stats(self.profiler, *self._additional_pstats_files, stream=fp)
@@ -114,10 +116,7 @@ class _Profiler:
         self._active_profilers = []
 
         if settings:
-            self.enabled_topics = {
-                topic
-                for topic in settings.split(":")
-            }
+            self.enabled_topics = {topic for topic in settings.split(":")}
 
     @contextlib.contextmanager
     def profile(self, topic, key, message=None):
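
The _Profiler change above keeps the colon-separated BST_PROFILE convention from the Topics comment; the setting splits into a set of enabled topics:

    settings = "circ-dep-check:sort-deps"  # e.g. from os.getenv("BST_PROFILE")
    enabled_topics = set(settings.split(":"))

    print("sort-deps" in enabled_topics)  # True
    print("scheduler" in enabled_topics)  # False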
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 54a011e..67d41a6 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -49,7 +49,7 @@ from ._workspaces import WORKSPACE_PROJECT_FILE
 
 
 # Project Configuration file
-_PROJECT_CONF_FILE = 'project.conf'
+_PROJECT_CONF_FILE = "project.conf"
 
 
 # List of all places plugins can come from
@@ -64,8 +64,7 @@ class PluginOrigins(FastEnum):
 # A simple object describing the behavior of
 # a host mount.
 #
-class HostMount():
-
+class HostMount:
     def __init__(self, path, host_path=None, optional=False):
 
         # Support environment variable expansion in host mounts
@@ -73,9 +72,9 @@ class HostMount():
         if host_path is not None:
             host_path = os.path.expandvars(host_path)
 
-        self.path = path              # Path inside the sandbox
-        self.host_path = host_path    # Path on the host
-        self.optional = optional      # Optional mounts do not incur warnings or errors
+        self.path = path  # Path inside the sandbox
+        self.host_path = host_path  # Path on the host
+        self.optional = optional  # Optional mounts do not incur warnings or errors
 
         if self.host_path is None:
             self.host_path = self.path
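
HostMount above expands environment variables in the host path via os.path.expandvars, which substitutes $VAR references from the environment:

    import os

    os.environ["TOOLS"] = "/opt/tools"  # illustrative variable
    print(os.path.expandvars("$TOOLS/bin"))  # /opt/tools/bin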
@@ -86,24 +85,32 @@ class ProjectConfig:
     def __init__(self):
         self.element_factory = None
         self.source_factory = None
-        self.options = None                      # OptionPool
-        self.base_variables = {}                 # The base set of variables
-        self.element_overrides = {}              # Element specific configurations
-        self.source_overrides = {}               # Source specific configurations
-        self.mirrors = OrderedDict()             # contains dicts of alias-mappings to URIs.
-        self.default_mirror = None               # The name of the preferred mirror.
-        self._aliases = None                     # Aliases dictionary
+        self.options = None  # OptionPool
+        self.base_variables = {}  # The base set of variables
+        self.element_overrides = {}  # Element specific configurations
+        self.source_overrides = {}  # Source specific configurations
+        self.mirrors = OrderedDict()  # contains dicts of alias-mappings to URIs.
+        self.default_mirror = None  # The name of the preferred mirror.
+        self._aliases = None  # Aliases dictionary
 
 
 # Project()
 #
 # The Project Configuration
 #
-class Project():
-
-    def __init__(self, directory, context, *, junction=None, cli_options=None,
-                 default_mirror=None, parent_loader=None,
-                 search_for_project=True, fetch_subprojects=None):
+class Project:
+    def __init__(
+        self,
+        directory,
+        context,
+        *,
+        junction=None,
+        cli_options=None,
+        default_mirror=None,
+        parent_loader=None,
+        search_for_project=True,
+        fetch_subprojects=None
+    ):
 
         # The project name
         self.name = None
@@ -125,31 +132,31 @@ class Project():
         self._default_targets = None
 
         # ProjectRefs for the main refs and also for junctions
-        self.refs = ProjectRefs(self.directory, 'project.refs')
-        self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
+        self.refs = ProjectRefs(self.directory, "project.refs")
+        self.junction_refs = ProjectRefs(self.directory, "junction.refs")
 
         self.config = ProjectConfig()
         self.first_pass_config = ProjectConfig()
 
-        self.junction = junction                 # The junction Element object, if this is a subproject
+        self.junction = junction  # The junction Element object, if this is a subproject
 
-        self.ref_storage = None                  # ProjectRefStorage setting
-        self.base_environment = {}               # The base set of environment variables
-        self.base_env_nocache = None             # The base nocache mask (list) for the environment
+        self.ref_storage = None  # ProjectRefStorage setting
+        self.base_environment = {}  # The base set of environment variables
+        self.base_env_nocache = None  # The base nocache mask (list) for the environment
 
         #
         # Private Members
         #
 
-        self._default_mirror = default_mirror    # The name of the preferred mirror.
+        self._default_mirror = default_mirror  # The name of the preferred mirror.
 
         self._cli_options = cli_options
 
-        self._fatal_warnings = []             # A list of warnings which should trigger an error
+        self._fatal_warnings = []  # A list of warnings which should trigger an error
 
-        self._shell_command = []      # The default interactive shell command
+        self._shell_command = []  # The default interactive shell command
         self._shell_environment = {}  # Statically set environment vars
-        self._shell_host_files = []   # A list of HostMount objects
+        self._shell_host_files = []  # A list of HostMount objects
 
         self.artifact_cache_specs = None
         self.source_cache_specs = None
@@ -163,7 +170,7 @@ class Project():
         self._fully_loaded = False
         self._project_includes = None
 
-        with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-')):
+        with PROFILER.profile(Topics.LOAD_PROJECT, self.directory.replace(os.sep, "-")):
             self._load(parent_loader=parent_loader, fetch_subprojects=fetch_subprojects)
 
         self._partially_loaded = True
@@ -252,23 +259,24 @@ class Project():
     #    (LoadError): In case that the project path is not valid or does not
     #                 exist
     #
-    def get_path_from_node(self, node, *,
-                           check_is_file=False, check_is_dir=False):
+    def get_path_from_node(self, node, *, check_is_file=False, check_is_dir=False):
         path_str = node.as_str()
         path = Path(path_str)
         full_path = self._absolute_directory_path / path
 
         if full_path.is_symlink():
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' must not point to "
-                            "symbolic links ".format(provenance, path_str),
-                            LoadErrorReason.PROJ_PATH_INVALID_KIND)
+            raise LoadError(
+                "{}: Specified path '{}' must not point to " "symbolic links ".format(provenance, path_str),
+                LoadErrorReason.PROJ_PATH_INVALID_KIND,
+            )
 
-        if path.parts and path.parts[0] == '..':
+        if path.parts and path.parts[0] == "..":
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' first component must "
-                            "not be '..'".format(provenance, path_str),
-                            LoadErrorReason.PROJ_PATH_INVALID)
+            raise LoadError(
+                "{}: Specified path '{}' first component must " "not be '..'".format(provenance, path_str),
+                LoadErrorReason.PROJ_PATH_INVALID,
+            )
 
         try:
             if sys.version_info[0] == 3 and sys.version_info[1] < 6:
@@ -277,55 +285,81 @@ class Project():
                 full_resolved_path = full_path.resolve(strict=True)  # pylint: disable=unexpected-keyword-arg
         except FileNotFoundError:
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' does not exist".format(provenance, path_str),
-                            LoadErrorReason.MISSING_FILE)
+            raise LoadError(
+                "{}: Specified path '{}' does not exist".format(provenance, path_str), LoadErrorReason.MISSING_FILE
+            )
 
         is_inside = self._absolute_directory_path in full_resolved_path.parents or (
-            full_resolved_path == self._absolute_directory_path)
+            full_resolved_path == self._absolute_directory_path
+        )
 
         if not is_inside:
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' must not lead outside of the "
-                            "project directory".format(provenance, path_str),
-                            LoadErrorReason.PROJ_PATH_INVALID)
+            raise LoadError(
+                "{}: Specified path '{}' must not lead outside of the "
+                "project directory".format(provenance, path_str),
+                LoadErrorReason.PROJ_PATH_INVALID,
+            )
 
         if path.is_absolute():
             provenance = node.get_provenance()
-            raise LoadError("{}: Absolute path: '{}' invalid.\n"
-                            "Please specify a path relative to the project's root."
-                            .format(provenance, path), LoadErrorReason.PROJ_PATH_INVALID)
+            raise LoadError(
+                "{}: Absolute path: '{}' invalid.\n"
+                "Please specify a path relative to the project's root.".format(provenance, path),
+                LoadErrorReason.PROJ_PATH_INVALID,
+            )
 
-        if full_resolved_path.is_socket() or (
-                full_resolved_path.is_fifo() or
-                full_resolved_path.is_block_device()):
+        if full_resolved_path.is_socket() or (full_resolved_path.is_fifo() or full_resolved_path.is_block_device()):
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' points to an unsupported "
-                            "file kind".format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+            raise LoadError(
+                "{}: Specified path '{}' points to an unsupported " "file kind".format(provenance, path_str),
+                LoadErrorReason.PROJ_PATH_INVALID_KIND,
+            )
 
         if check_is_file and not full_resolved_path.is_file():
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' is not a regular file"
-                            .format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+            raise LoadError(
+                "{}: Specified path '{}' is not a regular file".format(provenance, path_str),
+                LoadErrorReason.PROJ_PATH_INVALID_KIND,
+            )
 
         if check_is_dir and not full_resolved_path.is_dir():
             provenance = node.get_provenance()
-            raise LoadError("{}: Specified path '{}' is not a directory"
-                            .format(provenance, path_str), LoadErrorReason.PROJ_PATH_INVALID_KIND)
+            raise LoadError(
+                "{}: Specified path '{}' is not a directory".format(provenance, path_str),
+                LoadErrorReason.PROJ_PATH_INVALID_KIND,
+            )
 
         return path_str
 
     def _validate_node(self, node):
-        node.validate_keys([
-            'format-version',
-            'element-path', 'variables',
-            'environment', 'environment-nocache',
-            'split-rules', 'elements', 'plugins',
-            'aliases', 'name', 'defaults',
-            'artifacts', 'options',
-            'fail-on-overlap', 'shell', 'fatal-warnings',
-            'ref-storage', 'sandbox', 'mirrors', 'remote-execution',
-            'sources', 'source-caches', '(@)'
-        ])
+        node.validate_keys(
+            [
+                "format-version",
+                "element-path",
+                "variables",
+                "environment",
+                "environment-nocache",
+                "split-rules",
+                "elements",
+                "plugins",
+                "aliases",
+                "name",
+                "defaults",
+                "artifacts",
+                "options",
+                "fail-on-overlap",
+                "shell",
+                "fatal-warnings",
+                "ref-storage",
+                "sandbox",
+                "mirrors",
+                "remote-execution",
+                "sources",
+                "source-caches",
+                "(@)",
+            ]
+        )
 
     # create_element()
     #
@@ -438,10 +472,7 @@ class Project():
         with self._context.messenger.simple_task("Resolving elements") as task:
             if task:
                 task.set_maximum_progress(self.loader.loaded)
-            elements = [
-                Element._new_from_meta(meta, task)
-                for meta in meta_elements
-            ]
+            elements = [Element._new_from_meta(meta, task) for meta in meta_elements]
 
         Element._clear_meta_elements_cache()
 
@@ -450,13 +481,11 @@ class Project():
         redundant_refs = Element._get_redundant_source_refs()
         if redundant_refs:
             detail = "The following inline specified source references will be ignored:\n\n"
-            lines = [
-                "{}:{}".format(source._get_provenance(), ref)
-                for source, ref in redundant_refs
-            ]
+            lines = ["{}:{}".format(source._get_provenance(), ref) for source, ref in redundant_refs]
             detail += "\n".join(lines)
             self._context.messenger.message(
-                Message(MessageType.WARN, "Ignoring redundant source references", detail=detail))
+                Message(MessageType.WARN, "Ignoring redundant source references", detail=detail)
+            )
 
         return elements
 
@@ -590,49 +619,49 @@ class Project():
         self._project_conf._composite(pre_config_node)
 
         # Assert project's format version early, before validating toplevel keys
-        format_version = pre_config_node.get_int('format-version')
+        format_version = pre_config_node.get_int("format-version")
         if format_version < BST_FORMAT_VERSION_MIN:
             major, minor = utils.get_bst_version()
             raise LoadError(
                 "Project requested format version {}, but BuildStream {}.{} only supports format version {} or above."
-                "Use latest 1.x release"
-                .format(format_version, major, minor, BST_FORMAT_VERSION_MIN), LoadErrorReason.UNSUPPORTED_PROJECT)
+                "Use latest 1.x release".format(format_version, major, minor, BST_FORMAT_VERSION_MIN),
+                LoadErrorReason.UNSUPPORTED_PROJECT,
+            )
 
         if BST_FORMAT_VERSION < format_version:
             major, minor = utils.get_bst_version()
             raise LoadError(
-                "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}"
-                .format(format_version, major, minor, BST_FORMAT_VERSION), LoadErrorReason.UNSUPPORTED_PROJECT)
+                "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}".format(
+                    format_version, major, minor, BST_FORMAT_VERSION
+                ),
+                LoadErrorReason.UNSUPPORTED_PROJECT,
+            )
 
         self._validate_node(pre_config_node)
 
         # The project name, element path and option declarations
         # are constant and cannot be overridden by option conditional statements
         # FIXME: we should be keeping node information for further composition here
-        self.name = self._project_conf.get_str('name')
+        self.name = self._project_conf.get_str("name")
 
         # Validate that project name is a valid symbol name
-        _assert_symbol_name(self.name, "project name",
-                            ref_node=pre_config_node.get_node('name'))
+        _assert_symbol_name(self.name, "project name", ref_node=pre_config_node.get_node("name"))
 
         self.element_path = os.path.join(
-            self.directory,
-            self.get_path_from_node(pre_config_node.get_scalar('element-path'),
-                                    check_is_dir=True)
+            self.directory, self.get_path_from_node(pre_config_node.get_scalar("element-path"), check_is_dir=True)
         )
 
         self.config.options = OptionPool(self.element_path)
         self.first_pass_config.options = OptionPool(self.element_path)
 
-        defaults = pre_config_node.get_mapping('defaults')
-        defaults.validate_keys(['targets'])
+        defaults = pre_config_node.get_mapping("defaults")
+        defaults.validate_keys(["targets"])
         self._default_targets = defaults.get_str_list("targets")
 
         # Fatal warnings
-        self._fatal_warnings = pre_config_node.get_str_list('fatal-warnings', default=[])
+        self._fatal_warnings = pre_config_node.get_str_list("fatal-warnings", default=[])
 
-        self.loader = Loader(self._context, self,
-                             parent=parent_loader, fetch_subprojects=fetch_subprojects)
+        self.loader = Loader(self._context, self, parent=parent_loader, fetch_subprojects=fetch_subprojects)
 
         self._project_includes = Includes(self.loader, copy_tree=False)
 
@@ -641,16 +670,17 @@ class Project():
         config_no_include = self._default_config_node.clone()
         project_conf_first_pass._composite(config_no_include)
 
-        self._load_pass(config_no_include, self.first_pass_config,
-                        ignore_unknown=True)
+        self._load_pass(config_no_include, self.first_pass_config, ignore_unknown=True)
 
         # Use separate file for storing source references
-        ref_storage_node = pre_config_node.get_scalar('ref-storage')
+        ref_storage_node = pre_config_node.get_scalar("ref-storage")
         self.ref_storage = ref_storage_node.as_str()
         if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
             p = ref_storage_node.get_provenance()
-            raise LoadError("{}: Invalid value '{}' specified for ref-storage"
-                            .format(p, self.ref_storage), LoadErrorReason.INVALID_DATA)
+            raise LoadError(
+                "{}: Invalid value '{}' specified for ref-storage".format(p, self.ref_storage),
+                LoadErrorReason.INVALID_DATA,
+            )
 
         if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
             self.junction_refs.load(self.first_pass_config.options)
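
The ref-storage setting above must be one of the two storage modes. A sketch assuming the ProjectRefStorage values are "inline" and "project.refs":

    VALID_REF_STORAGE = ("inline", "project.refs")  # assumed values

    ref_storage = "project.refs"
    if ref_storage not in VALID_REF_STORAGE:
        raise ValueError("Invalid value '{}' specified for ref-storage".format(ref_storage))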
@@ -692,8 +722,7 @@ class Project():
 
         # Load remote-execution configuration for this project
         project_specs = SandboxRemote.specs_from_config_node(config, self.directory)
-        override_specs = SandboxRemote.specs_from_config_node(
-            self._context.get_overrides(self.name), self.directory)
+        override_specs = SandboxRemote.specs_from_config_node(self._context.get_overrides(self.name), self.directory)
 
         if override_specs is not None:
             self.remote_execution_specs = override_specs
@@ -703,25 +732,25 @@ class Project():
             self.remote_execution_specs = self._context.remote_execution_specs
 
         # Load sandbox environment variables
-        self.base_environment = config.get_mapping('environment')
-        self.base_env_nocache = config.get_str_list('environment-nocache')
+        self.base_environment = config.get_mapping("environment")
+        self.base_env_nocache = config.get_str_list("environment-nocache")
 
         # Load sandbox configuration
-        self._sandbox = config.get_mapping('sandbox')
+        self._sandbox = config.get_mapping("sandbox")
 
         # Load project split rules
-        self._splits = config.get_mapping('split-rules')
+        self._splits = config.get_mapping("split-rules")
 
         # Support backwards compatibility for fail-on-overlap
-        fail_on_overlap = config.get_scalar('fail-on-overlap', None)
+        fail_on_overlap = config.get_scalar("fail-on-overlap", None)
 
         # Deprecation check
         if not fail_on_overlap.is_none():
             self._context.messenger.message(
                 Message(
                     MessageType.WARN,
-                    "Use of fail-on-overlap within project.conf " +
-                    "is deprecated. Consider using fatal-warnings instead."
+                    "Use of fail-on-overlap within project.conf "
+                    + "is deprecated. Consider using fatal-warnings instead.",
                 )
             )
 
@@ -733,29 +762,29 @@ class Project():
             self.refs.load(self.options)
 
         # Parse shell options
-        shell_options = config.get_mapping('shell')
-        shell_options.validate_keys(['command', 'environment', 'host-files'])
-        self._shell_command = shell_options.get_str_list('command')
+        shell_options = config.get_mapping("shell")
+        shell_options.validate_keys(["command", "environment", "host-files"])
+        self._shell_command = shell_options.get_str_list("command")
 
         # Perform environment expansion right away
-        shell_environment = shell_options.get_mapping('environment', default={})
+        shell_environment = shell_options.get_mapping("environment", default={})
         for key in shell_environment.keys():
             value = shell_environment.get_str(key)
             self._shell_environment[key] = os.path.expandvars(value)
 
         # Host files is parsed as a list for convenience
-        host_files = shell_options.get_sequence('host-files', default=[])
+        host_files = shell_options.get_sequence("host-files", default=[])
         for host_file in host_files:
             if isinstance(host_file, ScalarNode):
                 mount = HostMount(host_file)
             else:
                 # Some validation
-                host_file.validate_keys(['path', 'host_path', 'optional'])
+                host_file.validate_keys(["path", "host_path", "optional"])
 
                 # Parse the host mount
-                path = host_file.get_str('path')
-                host_path = host_file.get_str('host_path', default=None)
-                optional = host_file.get_bool('optional', default=False)
+                path = host_file.get_str("path")
+                host_path = host_file.get_str("host_path", default=None)
+                optional = host_file.get_bool("optional", default=False)
                 mount = HostMount(path, host_path, optional)
 
             self._shell_host_files.append(mount)
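
The host-files convenience parsed above accepts either a bare string (just a path) or a mapping with optional host_path/optional keys. A simplified sketch of that shape, using plain Python values in place of BuildStream's ScalarNode/MappingNode objects (HostMount here is a hypothetical namedtuple, not the real class):

    from collections import namedtuple

    # Hypothetical stand-in for BuildStream's HostMount
    HostMount = namedtuple("HostMount", "path host_path optional")

    def parse_host_file(entry):
        # A bare string means the path doubles as the host path
        if isinstance(entry, str):
            return HostMount(entry, None, False)
        return HostMount(entry["path"], entry.get("host_path"), entry.get("optional", False))

    print(parse_host_file("/etc/resolv.conf"))
    print(parse_host_file({"path": "/etc/pkg", "host_path": "/usr/local/pkg", "optional": True}))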
@@ -770,22 +799,21 @@ class Project():
     #    output (ProjectConfig) - ProjectConfig to load configuration onto.
     #    ignore_unknown (bool) - Whether the option loader should ignore unknown options.
     #
-    def _load_pass(self, config, output, *,
-                   ignore_unknown=False):
+    def _load_pass(self, config, output, *, ignore_unknown=False):
 
         # Element and Source type configurations will be composited later onto
         # element/source types, so we delete it from here and run our final
         # assertion after.
-        output.element_overrides = config.get_mapping('elements', default={})
-        output.source_overrides = config.get_mapping('sources', default={})
-        config.safe_del('elements')
-        config.safe_del('sources')
+        output.element_overrides = config.get_mapping("elements", default={})
+        output.source_overrides = config.get_mapping("sources", default={})
+        config.safe_del("elements")
+        config.safe_del("sources")
         config._assert_fully_composited()
 
         self._load_plugin_factories(config, output)
 
         # Load project options
-        options_node = config.get_mapping('options', default={})
+        options_node = config.get_mapping("options", default={})
         output.options.load(options_node)
         if self.junction:
             # load before user configuration
@@ -793,7 +821,7 @@ class Project():
 
         # Collect option values specified in the user configuration
         overrides = self._context.get_overrides(self.name)
-        override_options = overrides.get_mapping('options', default={})
+        override_options = overrides.get_mapping("options", default={})
         output.options.load_yaml_values(override_options)
         if self._cli_options:
             output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
@@ -812,10 +840,10 @@ class Project():
         output.options.process_node(output.source_overrides)
 
         # Load base variables
-        output.base_variables = config.get_mapping('variables')
+        output.base_variables = config.get_mapping("variables")
 
         # Add the project name as a default variable
-        output.base_variables['project-name'] = self.name
+        output.base_variables["project-name"] = self.name
 
         # Extend variables with automatic variables and option exports
         # Initialize it as a string, since all variables are processed as strings.
@@ -825,27 +853,24 @@ class Project():
         if self._context.build_max_jobs == 0:
             # User requested automatic max-jobs
             platform = self._context.platform
-            output.base_variables['max-jobs'] = str(platform.get_cpu_count(8))
+            output.base_variables["max-jobs"] = str(platform.get_cpu_count(8))
         else:
             # User requested explicit max-jobs setting
-            output.base_variables['max-jobs'] = str(self._context.build_max_jobs)
+            output.base_variables["max-jobs"] = str(self._context.build_max_jobs)
 
         # Export options into variables, if that was requested
         output.options.export_variables(output.base_variables)
 
         # Override default_mirror if not set by command-line
-        output.default_mirror = self._default_mirror or overrides.get_str(
-            'default-mirror', default=None)
+        output.default_mirror = self._default_mirror or overrides.get_str("default-mirror", default=None)
 
-        mirrors = config.get_sequence('mirrors', default=[])
+        mirrors = config.get_sequence("mirrors", default=[])
         for mirror in mirrors:
-            allowed_mirror_fields = [
-                'name', 'aliases'
-            ]
+            allowed_mirror_fields = ["name", "aliases"]
             mirror.validate_keys(allowed_mirror_fields)
-            mirror_name = mirror.get_str('name')
+            mirror_name = mirror.get_str("name")
             alias_mappings = {}
-            for alias_mapping, uris in mirror.get_mapping('aliases').items():
+            for alias_mapping, uris in mirror.get_mapping("aliases").items():
                 assert type(uris) is SequenceNode  # pylint: disable=unidiomatic-typecheck
                 alias_mappings[alias_mapping] = uris.as_str_list()
             output.mirrors[mirror_name] = alias_mappings
@@ -853,7 +878,7 @@ class Project():
                 output.default_mirror = mirror_name
 
         # Source url aliases
-        output._aliases = config.get_mapping('aliases', default={})
+        output._aliases = config.get_mapping("aliases", default={})
 
     # _find_project_dir()
     #
@@ -873,9 +898,7 @@ class Project():
     def _find_project_dir(self, directory):
         workspace_element = None
         config_filenames = [_PROJECT_CONF_FILE, WORKSPACE_PROJECT_FILE]
-        found_directory, filename = utils._search_upward_for_files(
-            directory, config_filenames
-        )
+        found_directory, filename = utils._search_upward_for_files(directory, config_filenames)
         if filename == _PROJECT_CONF_FILE:
             project_directory = found_directory
         elif filename == WORKSPACE_PROJECT_FILE:
@@ -885,57 +908,62 @@ class Project():
                 project_directory = workspace_project.get_default_project_path()
                 workspace_element = workspace_project.get_default_element()
         else:
-            raise LoadError("None of {names} found in '{path}' or any of its parent directories"
-                            .format(names=config_filenames, path=directory), LoadErrorReason.MISSING_PROJECT_CONF)
+            raise LoadError(
+                "None of {names} found in '{path}' or any of its parent directories".format(
+                    names=config_filenames, path=directory
+                ),
+                LoadErrorReason.MISSING_PROJECT_CONF,
+            )
 
         return project_directory, workspace_element
 
     def _load_plugin_factories(self, config, output):
-        plugin_source_origins = []   # Origins of custom sources
+        plugin_source_origins = []  # Origins of custom sources
         plugin_element_origins = []  # Origins of custom elements
 
         # Plugin origins and versions
-        origins = config.get_sequence('plugins', default=[])
+        origins = config.get_sequence("plugins", default=[])
         source_format_versions = {}
         element_format_versions = {}
         for origin in origins:
             allowed_origin_fields = [
-                'origin', 'sources', 'elements',
-                'package-name', 'path',
+                "origin",
+                "sources",
+                "elements",
+                "package-name",
+                "path",
             ]
             origin.validate_keys(allowed_origin_fields)
 
             # Store source versions for checking later
-            source_versions = origin.get_mapping('sources', default={})
+            source_versions = origin.get_mapping("sources", default={})
             for key in source_versions.keys():
                 if key in source_format_versions:
-                    raise LoadError("Duplicate listing of source '{}'".format(key),
-                                    LoadErrorReason.INVALID_YAML)
+                    raise LoadError("Duplicate listing of source '{}'".format(key), LoadErrorReason.INVALID_YAML)
                 source_format_versions[key] = source_versions.get_int(key)
 
             # Store element versions for checking later
-            element_versions = origin.get_mapping('elements', default={})
+            element_versions = origin.get_mapping("elements", default={})
             for key in element_versions.keys():
                 if key in element_format_versions:
-                    raise LoadError("Duplicate listing of element '{}'".format(key),
-                                    LoadErrorReason.INVALID_YAML)
+                    raise LoadError("Duplicate listing of element '{}'".format(key), LoadErrorReason.INVALID_YAML)
                 element_format_versions[key] = element_versions.get_int(key)
 
             # Store the origins if they're not 'core'.
             # core elements are loaded by default, so storing is unnecessary.
-            origin_value = origin.get_enum('origin', PluginOrigins)
+            origin_value = origin.get_enum("origin", PluginOrigins)
 
             if origin_value != PluginOrigins.CORE:
-                self._store_origin(origin, 'sources', plugin_source_origins)
-                self._store_origin(origin, 'elements', plugin_element_origins)
+                self._store_origin(origin, "sources", plugin_source_origins)
+                self._store_origin(origin, "elements", plugin_element_origins)
 
-        pluginbase = PluginBase(package='buildstream.plugins')
-        output.element_factory = ElementFactory(pluginbase,
-                                                plugin_origins=plugin_element_origins,
-                                                format_versions=element_format_versions)
-        output.source_factory = SourceFactory(pluginbase,
-                                              plugin_origins=plugin_source_origins,
-                                              format_versions=source_format_versions)
+        pluginbase = PluginBase(package="buildstream.plugins")
+        output.element_factory = ElementFactory(
+            pluginbase, plugin_origins=plugin_element_origins, format_versions=element_format_versions
+        )
+        output.source_factory = SourceFactory(
+            pluginbase, plugin_origins=plugin_source_origins, format_versions=source_format_versions
+        )
 
     # _store_origin()
     #
@@ -951,25 +979,25 @@ class Project():
     # Raises:
     #    LoadError if 'origin' is an unexpected value
     def _store_origin(self, origin, plugin_group, destination):
-        expected_groups = ['sources', 'elements']
+        expected_groups = ["sources", "elements"]
         if plugin_group not in expected_groups:
-            raise LoadError("Unexpected plugin group: {}, expecting {}"
-                            .format(plugin_group, expected_groups),
-                            LoadErrorReason.INVALID_DATA)
+            raise LoadError(
+                "Unexpected plugin group: {}, expecting {}".format(plugin_group, expected_groups),
+                LoadErrorReason.INVALID_DATA,
+            )
         if plugin_group in origin.keys():
             origin_node = origin.clone()
             plugins = origin.get_mapping(plugin_group, default={})
-            origin_node['plugins'] = plugins.keys()
+            origin_node["plugins"] = plugins.keys()
 
             for group in expected_groups:
                 if group in origin_node:
                     del origin_node[group]
 
-            if origin_node.get_enum('origin', PluginOrigins) == PluginOrigins.LOCAL:
-                path = self.get_path_from_node(origin.get_scalar('path'),
-                                               check_is_dir=True)
+            if origin_node.get_enum("origin", PluginOrigins) == PluginOrigins.LOCAL:
+                path = self.get_path_from_node(origin.get_scalar("path"), check_is_dir=True)
                 # paths are passed in relative to the project, but must be absolute
-                origin_node['path'] = os.path.join(self.directory, path)
+                origin_node["path"] = os.path.join(self.directory, path)
             destination.append(origin_node)
 
     # _warning_is_fatal():
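
The plugin-origin loading reformatted above rejects duplicate source or element listings across origins. The same check in isolation, with plain dicts standing in for node objects and a stubbed LoadError (names here are illustrative, not BuildStream's API):

    class LoadError(Exception):
        pass

    def collect_format_versions(origins, group):
        # Reject duplicate listings across all origins, as in the hunk above
        versions = {}
        for origin in origins:
            for kind, version in origin.get(group, {}).items():
                if kind in versions:
                    raise LoadError("Duplicate listing of {} '{}'".format(group[:-1], kind))
                versions[kind] = version
        return versions

    print(collect_format_versions([{"sources": {"git": 1}}, {"sources": {"tar": 0}}], "sources"))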
diff --git a/src/buildstream/_projectrefs.py b/src/buildstream/_projectrefs.py
index 0555488..aca7c67 100644
--- a/src/buildstream/_projectrefs.py
+++ b/src/buildstream/_projectrefs.py
@@ -26,15 +26,15 @@ from ._exceptions import LoadError, LoadErrorReason
 # ProjectRefStorage()
 #
 # Indicates the type of ref storage
-class ProjectRefStorage():
+class ProjectRefStorage:
 
     # Source references are stored inline
     #
-    INLINE = 'inline'
+    INLINE = "inline"
 
     # Source references are stored in a central project.refs file
     #
-    PROJECT_REFS = 'project.refs'
+    PROJECT_REFS = "project.refs"
 
 
 # ProjectRefs()
@@ -45,8 +45,7 @@ class ProjectRefStorage():
 #    directory (str): The project directory
 #    base_name (str): The project.refs basename
 #
-class ProjectRefs():
-
+class ProjectRefs:
     def __init__(self, directory, base_name):
         directory = os.path.abspath(directory)
         self._fullpath = os.path.join(directory, base_name)
@@ -83,12 +82,12 @@ class ProjectRefs():
             self._toplevel_node = _new_synthetic_file(self._fullpath)
             self._toplevel_save = self._toplevel_node
 
-        self._toplevel_node.validate_keys(['projects'])
+        self._toplevel_node.validate_keys(["projects"])
 
         # Ensure we create our toplevel entry point on the fly here
         for node in [self._toplevel_node, self._toplevel_save]:
-            if 'projects' not in node:
-                node['projects'] = {}
+            if "projects" not in node:
+                node["projects"] = {}
 
     # lookup_ref()
     #
@@ -122,7 +121,7 @@ class ProjectRefs():
     # Looks up a ref node in the project.refs file, creates one if ensure is True.
     #
     def _lookup(self, toplevel, project, element, source_index, *, ensure=False):
-        projects = toplevel.get_mapping('projects')
+        projects = toplevel.get_mapping("projects")
 
         # Fetch the project
         try:
diff --git a/src/buildstream/_remote.py b/src/buildstream/_remote.py
index ab1dc19..78f6772 100644
--- a/src/buildstream/_remote.py
+++ b/src/buildstream/_remote.py
@@ -35,14 +35,14 @@ class RemoteType(FastEnum):
     ALL = "all"
 
     def __str__(self):
-        return self.name.lower().replace('_', '-')
+        return self.name.lower().replace("_", "-")
 
 
 # RemoteSpec():
 #
 # Defines the basic structure of a remote specification.
 #
-class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key client_cert instance_name type')):
+class RemoteSpec(namedtuple("RemoteSpec", "url push server_cert client_key client_cert instance_name type")):
 
     # new_from_config_node
     #
@@ -60,15 +60,15 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
     #
     @classmethod
     def new_from_config_node(cls, spec_node, basedir=None):
-        spec_node.validate_keys(['url', 'push', 'server-cert', 'client-key', 'client-cert', 'instance-name', 'type'])
+        spec_node.validate_keys(["url", "push", "server-cert", "client-key", "client-cert", "instance-name", "type"])
 
-        url = spec_node.get_str('url')
+        url = spec_node.get_str("url")
         if not url:
-            provenance = spec_node.get_node('url').get_provenance()
+            provenance = spec_node.get_node("url").get_provenance()
             raise LoadError("{}: empty artifact cache URL".format(provenance), LoadErrorReason.INVALID_DATA)
 
-        push = spec_node.get_bool('push', default=False)
-        instance_name = spec_node.get_str('instance-name', default=None)
+        push = spec_node.get_bool("push", default=False)
+        instance_name = spec_node.get_str("instance-name", default=None)
 
         def parse_cert(key):
             cert = spec_node.get_str(key, default=None)
@@ -80,20 +80,22 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
 
             return cert
 
-        cert_keys = ('server-cert', 'client-key', 'client-cert')
+        cert_keys = ("server-cert", "client-key", "client-cert")
         server_cert, client_key, client_cert = tuple(parse_cert(key) for key in cert_keys)
 
         if client_key and not client_cert:
-            provenance = spec_node.get_node('client-key').get_provenance()
-            raise LoadError("{}: 'client-key' was specified without 'client-cert'".format(provenance),
-                            LoadErrorReason.INVALID_DATA)
+            provenance = spec_node.get_node("client-key").get_provenance()
+            raise LoadError(
+                "{}: 'client-key' was specified without 'client-cert'".format(provenance), LoadErrorReason.INVALID_DATA
+            )
 
         if client_cert and not client_key:
-            provenance = spec_node.get_node('client-cert').get_provenance()
-            raise LoadError("{}: 'client-cert' was specified without 'client-key'".format(provenance),
-                            LoadErrorReason.INVALID_DATA)
+            provenance = spec_node.get_node("client-cert").get_provenance()
+            raise LoadError(
+                "{}: 'client-cert' was specified without 'client-key'".format(provenance), LoadErrorReason.INVALID_DATA
+            )
 
-        type_ = spec_node.get_enum('type', RemoteType, default=RemoteType.ALL)
+        type_ = spec_node.get_enum("type", RemoteType, default=RemoteType.ALL)
 
         return cls(url, push, server_cert, client_key, client_cert, instance_name, type_)
 
@@ -108,11 +110,11 @@ class RemoteSpec(namedtuple('RemoteSpec', 'url push server_cert client_key clien
 RemoteSpec.__new__.__defaults__ = (  # type: ignore
     # mandatory          # url            - The url of the remote
     # mandatory          # push           - Whether the remote should be used for pushing
-    None,                # server_cert    - The server certificate
-    None,                # client_key     - The (private) client key
-    None,                # client_cert    - The (public) client certificate
-    None,                # instance_name  - The (grpc) instance name of the remote
-    RemoteType.ALL       # type           - The type of the remote (index, storage, both)
+    None,  # server_cert    - The server certificate
+    None,  # client_key     - The (private) client key
+    None,  # client_cert    - The (public) client certificate
+    None,  # instance_name  - The (grpc) instance name of the remote
+    RemoteType.ALL,  # type           - The type of the remote (index, storage, both)
 )
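
As the defaults tuple above shows, only url and push are mandatory; the remaining five fields fall back to these values. A runnable sketch of that behavior, with RemoteType reduced to a plain constant instead of the real FastEnum and an illustrative cache URL:

    from collections import namedtuple

    class RemoteType:  # stand-in for the real FastEnum
        ALL = "all"

    RemoteSpec = namedtuple("RemoteSpec", "url push server_cert client_key client_cert instance_name type")
    RemoteSpec.__new__.__defaults__ = (None, None, None, None, RemoteType.ALL)

    spec = RemoteSpec("https://cache.example.com:11002", True)
    print(spec.server_cert, spec.type)  # None all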
 
 
@@ -126,7 +128,7 @@ RemoteSpec.__new__.__defaults__ = (  # type: ignore
 # Customization for the particular protocol is expected to be
 # performed in children.
 #
-class BaseRemote():
+class BaseRemote:
     key_name = None
 
     def __init__(self, spec):
@@ -154,25 +156,24 @@ class BaseRemote():
 
         # Set up the communication channel
         url = urlparse(self.spec.url)
-        if url.scheme == 'http':
+        if url.scheme == "http":
             port = url.port or 80
-            self.channel = grpc.insecure_channel('{}:{}'.format(url.hostname, port))
-        elif url.scheme == 'https':
+            self.channel = grpc.insecure_channel("{}:{}".format(url.hostname, port))
+        elif url.scheme == "https":
             port = url.port or 443
             try:
                 server_cert, client_key, client_cert = _read_files(
-                    self.spec.server_cert,
-                    self.spec.client_key,
-                    self.spec.client_cert)
+                    self.spec.server_cert, self.spec.client_key, self.spec.client_cert
+                )
             except FileNotFoundError as e:
                 raise RemoteError("Could not read certificates: {}".format(e)) from e
             self.server_cert = server_cert
             self.client_key = client_key
             self.client_cert = client_cert
-            credentials = grpc.ssl_channel_credentials(root_certificates=self.server_cert,
-                                                       private_key=self.client_key,
-                                                       certificate_chain=self.client_cert)
-            self.channel = grpc.secure_channel('{}:{}'.format(url.hostname, port), credentials)
+            credentials = grpc.ssl_channel_credentials(
+                root_certificates=self.server_cert, private_key=self.client_key, certificate_chain=self.client_cert
+            )
+            self.channel = grpc.secure_channel("{}:{}".format(url.hostname, port), credentials)
         else:
             raise RemoteError("Unsupported URL: {}".format(self.spec.url))
 
@@ -258,7 +259,8 @@ class BaseRemote():
 def _read_files(*files):
     def read_file(f):
         if f:
-            with open(f, 'rb') as data:
+            with open(f, "rb") as data:
                 return data.read()
         return None
+
     return (read_file(f) for f in files)
diff --git a/src/buildstream/_scheduler/jobs/elementjob.py b/src/buildstream/_scheduler/jobs/elementjob.py
index 246eb75..6e035be 100644
--- a/src/buildstream/_scheduler/jobs/elementjob.py
+++ b/src/buildstream/_scheduler/jobs/elementjob.py
@@ -69,9 +69,9 @@ class ElementJob(Job):
         super().__init__(*args, **kwargs)
         self.set_name(element._get_full_name())
         self.queue = queue
-        self._element = element                # Set the Element pertaining to the job
-        self._action_cb = action_cb            # The action callable function
-        self._complete_cb = complete_cb        # The complete callable function
+        self._element = element  # Set the Element pertaining to the job
+        self._action_cb = action_cb  # The action callable function
+        self._complete_cb = complete_cb  # The complete callable function
 
         # Set the plugin element name & key for logging purposes
         self.set_message_element_name(self.name)
@@ -97,9 +97,7 @@ class ChildElementJob(ChildJob):
         # This should probably be omitted for non-build tasks but it's harmless here
         elt_env = self._element.get_environment()
         env_dump = yaml.round_trip_dump(elt_env, default_flow_style=False, allow_unicode=True)
-        self.message(MessageType.LOG,
-                     "Build environment for element {}".format(self._element.name),
-                     detail=env_dump)
+        self.message(MessageType.LOG, "Build environment for element {}".format(self._element.name), detail=env_dump)
 
         # Run the action
         return self._action_cb(self._element)
@@ -109,6 +107,6 @@ class ChildElementJob(ChildJob):
 
         workspace = self._element._get_workspace()
         if workspace is not None:
-            data['workspace'] = workspace.to_dict()
+            data["workspace"] = workspace.to_dict()
 
         return data
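
The environment logging above serializes the element's build environment with ruamel.yaml's legacy round_trip_dump helper, exactly as called in the hunk, before attaching it as message detail. A small standalone example of that call (the environment dict is illustrative):

    from ruamel import yaml

    elt_env = {"PATH": "/usr/bin:/bin", "MAKEFLAGS": "-j4"}
    env_dump = yaml.round_trip_dump(elt_env, default_flow_style=False, allow_unicode=True)
    print(env_dump)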
diff --git a/src/buildstream/_scheduler/jobs/job.py b/src/buildstream/_scheduler/jobs/job.py
index 4e6199e..b2bf1db 100644
--- a/src/buildstream/_scheduler/jobs/job.py
+++ b/src/buildstream/_scheduler/jobs/job.py
@@ -64,7 +64,7 @@ class JobStatus(FastEnum):
 
 
 # Used to distinguish between status messages and return values
-class _Envelope():
+class _Envelope:
     def __init__(self, message_type, message):
         self.message_type = message_type
         self.message = message
@@ -113,35 +113,34 @@ class _MessageType(FastEnum):
 #                   that should be used - should contain {pid}.
 #    max_retries (int): The maximum number of retries
 #
-class Job():
-
+class Job:
     def __init__(self, scheduler, action_name, logfile, *, max_retries=0):
 
         #
         # Public members
         #
-        self.name = None                 # The name of the job, set by the job's subclass
-        self.action_name = action_name   # The action name for the Queue
-        self.child_data = None           # Data to be sent to the main process
+        self.name = None  # The name of the job, set by the job's subclass
+        self.action_name = action_name  # The action name for the Queue
+        self.child_data = None  # Data to be sent to the main process
 
         #
         # Private members
         #
-        self._scheduler = scheduler            # The scheduler
-        self._queue = None                     # A message passing queue
-        self._process = None                   # The Process object
-        self._watcher = None                   # Child process watcher
-        self._listening = False                # Whether the parent is currently listening
-        self._suspended = False                # Whether this job is currently suspended
-        self._max_retries = max_retries        # Maximum number of automatic retries
-        self._result = None                    # Return value of child action in the parent
-        self._tries = 0                        # Try count, for retryable jobs
-        self._terminated = False               # Whether this job has been explicitly terminated
+        self._scheduler = scheduler  # The scheduler
+        self._queue = None  # A message passing queue
+        self._process = None  # The Process object
+        self._watcher = None  # Child process watcher
+        self._listening = False  # Whether the parent is currently listening
+        self._suspended = False  # Whether this job is currently suspended
+        self._max_retries = max_retries  # Maximum number of automatic retries
+        self._result = None  # Return value of child action in the parent
+        self._tries = 0  # Try count, for retryable jobs
+        self._terminated = False  # Whether this job has been explicitly terminated
 
         self._logfile = logfile
-        self._message_element_name = None      # The plugin instance element name for messaging
-        self._message_element_key = None       # The element key for messaging
-        self._element = None                   # The Element() passed to the Job() constructor, if applicable
+        self._message_element_name = None  # The plugin instance element name for messaging
+        self._message_element_key = None  # The element key for messaging
+        self._element = None  # The Element() passed to the Job() constructor, if applicable
 
     # set_name()
     #
@@ -168,23 +167,16 @@ class Job():
             self._max_retries,
             self._tries,
             self._message_element_name,
-            self._message_element_key
+            self._message_element_key,
         )
 
         if self._scheduler.context.platform.does_multiprocessing_start_require_pickling():
-            pickled = pickle_child_job(
-                child_job,
-                self._scheduler.context.get_projects(),
-            )
+            pickled = pickle_child_job(child_job, self._scheduler.context.get_projects(),)
             self._process = _multiprocessing.AsyncioSafeProcess(
-                target=do_pickled_child_job,
-                args=[pickled, self._queue],
+                target=do_pickled_child_job, args=[pickled, self._queue],
             )
         else:
-            self._process = _multiprocessing.AsyncioSafeProcess(
-                target=child_job.child_action,
-                args=[self._queue],
-            )
+            self._process = _multiprocessing.AsyncioSafeProcess(target=child_job.child_action, args=[self._queue],)
 
         # Block signals which are handled in the main process such that
         # the child process does not inherit the parent's state, but the main
@@ -271,8 +263,7 @@ class Job():
     #
     def kill(self):
         # Force kill
-        self.message(MessageType.WARN,
-                     "{} did not terminate gracefully, killing".format(self.action_name))
+        self.message(MessageType.WARN, "{} did not terminate gracefully, killing".format(self.action_name))
         utils._kill_process_tree(self._process.pid)
 
     # suspend()
@@ -281,8 +272,7 @@ class Job():
     #
     def suspend(self):
         if not self._suspended:
-            self.message(MessageType.STATUS,
-                         "{} suspending".format(self.action_name))
+            self.message(MessageType.STATUS, "{} suspending".format(self.action_name))
 
             try:
                 # Use SIGTSTP so that child processes may handle and propagate
@@ -306,8 +296,7 @@ class Job():
     def resume(self, silent=False):
         if self._suspended:
             if not silent and not self._scheduler.terminated:
-                self.message(MessageType.STATUS,
-                             "{} resuming".format(self.action_name))
+                self.message(MessageType.STATUS, "{} resuming".format(self.action_name))
 
             os.kill(self._process.pid, signal.SIGCONT)
             self._suspended = False
@@ -349,7 +338,7 @@ class Job():
     #            override 'element_name' and 'element_key' this way.
     #
     def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
-        kwargs['scheduler'] = True
+        kwargs["scheduler"] = True
         # If default name & key values not provided, set as given job attributes
         if element_name is None:
             element_name = self._message_element_name
@@ -387,8 +376,7 @@ class Job():
     #                   lists, dicts, numbers, but not Element instances).
     #
     def handle_message(self, message):
-        raise ImplError("Job '{kind}' does not implement handle_message()"
-                        .format(kind=type(self).__name__))
+        raise ImplError("Job '{kind}' does not implement handle_message()".format(kind=type(self).__name__))
 
     # parent_complete()
     #
@@ -400,8 +388,7 @@ class Job():
     #    result (any): The result returned by child_process().
     #
     def parent_complete(self, status, result):
-        raise ImplError("Job '{kind}' does not implement parent_complete()"
-                        .format(kind=type(self).__name__))
+        raise ImplError("Job '{kind}' does not implement parent_complete()".format(kind=type(self).__name__))
 
     # create_child_job()
     #
@@ -419,8 +406,7 @@ class Job():
     #    (ChildJob): An instance of a subclass of ChildJob.
     #
     def create_child_job(self, *args, **kwargs):
-        raise ImplError("Job '{kind}' does not implement create_child_job()"
-                        .format(kind=type(self).__name__))
+        raise ImplError("Job '{kind}' does not implement create_child_job()".format(kind=type(self).__name__))
 
     #######################################################
     #                  Local Private Methods              #
@@ -451,9 +437,11 @@ class Job():
             returncode = _ReturnCode(returncode)
         except ValueError:
             # An unexpected return code was returned; fail permanently and report
-            self.message(MessageType.ERROR,
-                         "Internal job process unexpectedly died with exit code {}".format(returncode),
-                         logfile=self._logfile)
+            self.message(
+                MessageType.ERROR,
+                "Internal job process unexpectedly died with exit code {}".format(returncode),
+                logfile=self._logfile,
+            )
             returncode = _ReturnCode.PERM_FAIL
 
         # We don't want to retry if we got OK or a permanent fail.
@@ -503,8 +491,7 @@ class Job():
             # For regression tests only, save the last error domain / reason
             # reported from a child task in the main process, this global state
             # is currently managed in _exceptions.py
-            set_last_task_error(envelope.message['domain'],
-                                envelope.message['reason'])
+            set_last_task_error(envelope.message["domain"], envelope.message["reason"])
         elif envelope.message_type is _MessageType.RESULT:
             assert self._result is None
             self._result = envelope.message
@@ -514,8 +501,7 @@ class Job():
         elif envelope.message_type is _MessageType.SUBCLASS_CUSTOM_MESSAGE:
             self.handle_message(envelope.message)
         else:
-            assert False, "Unhandled message type '{}': {}".format(
-                envelope.message_type, envelope.message)
+            assert False, "Unhandled message type '{}': {}".format(envelope.message_type, envelope.message)
 
     # _parent_process_queue()
     #
@@ -552,8 +538,7 @@ class Job():
         #      http://bugs.python.org/issue3831
         #
         if not self._listening:
-            self._scheduler.loop.add_reader(
-                self._queue._reader.fileno(), self._parent_recv)
+            self._scheduler.loop.add_reader(self._queue._reader.fileno(), self._parent_recv)
             self._listening = True
 
     # _parent_stop_listening()
@@ -589,11 +574,10 @@ class Job():
 #    message_element_key (tuple): None, or the element display key tuple
 #                                to be supplied to the Message() constructor.
 #
-class ChildJob():
-
+class ChildJob:
     def __init__(
-            self, action_name, messenger, logdir, logfile, max_retries, tries,
-            message_element_name, message_element_key):
+        self, action_name, messenger, logdir, logfile, max_retries, tries, message_element_name, message_element_key
+    ):
 
         self.action_name = action_name
 
@@ -624,14 +608,15 @@ class ChildJob():
     #            overridden here.
     #
     def message(self, message_type, message, element_name=None, element_key=None, **kwargs):
-        kwargs['scheduler'] = True
+        kwargs["scheduler"] = True
         # If default name & key values not provided, set as given job attributes
         if element_name is None:
             element_name = self._message_element_name
         if element_key is None:
             element_key = self._message_element_key
-        self._messenger.message(Message(message_type, message, element_name=element_name,
-                                        element_key=element_key, **kwargs))
+        self._messenger.message(
+            Message(message_type, message, element_name=element_name, element_key=element_key, **kwargs)
+        )
 
     # send_message()
     #
@@ -668,8 +653,7 @@ class ChildJob():
     #           the result of the Job.
     #
     def child_process(self):
-        raise ImplError("ChildJob '{kind}' does not implement child_process()"
-                        .format(kind=type(self).__name__))
+        raise ImplError("ChildJob '{kind}' does not implement child_process()".format(kind=type(self).__name__))
 
     # child_process_data()
     #
@@ -723,12 +707,13 @@ class ChildJob():
         def resume_time():
             nonlocal stopped_time
             nonlocal starttime
-            starttime += (datetime.datetime.now() - stopped_time)
+            starttime += datetime.datetime.now() - stopped_time
 
         # Time, log and run the action function
         #
-        with _signals.suspendable(stop_time, resume_time), \
-                self._messenger.recorded_messages(self._logfile, self._logdir) as filename:
+        with _signals.suspendable(stop_time, resume_time), self._messenger.recorded_messages(
+            self._logfile, self._logdir
+        ) as filename:
 
             self.message(MessageType.START, self.action_name, logfile=filename)
 
@@ -737,8 +722,7 @@ class ChildJob():
                 result = self.child_process()  # pylint: disable=assignment-from-no-return
             except SkipJob as e:
                 elapsed = datetime.datetime.now() - starttime
-                self.message(MessageType.SKIPPED, str(e),
-                             elapsed=elapsed, logfile=filename)
+                self.message(MessageType.SKIPPED, str(e), elapsed=elapsed, logfile=filename)
 
                 # Alert parent of skip by return code
                 self._child_shutdown(_ReturnCode.SKIPPED)
@@ -747,13 +731,16 @@ class ChildJob():
                 retry_flag = e.temporary
 
                 if retry_flag and (self._tries <= self._max_retries):
-                    self.message(MessageType.FAIL,
-                                 "Try #{} failed, retrying".format(self._tries),
-                                 elapsed=elapsed, logfile=filename)
+                    self.message(
+                        MessageType.FAIL,
+                        "Try #{} failed, retrying".format(self._tries),
+                        elapsed=elapsed,
+                        logfile=filename,
+                    )
                 else:
-                    self.message(MessageType.FAIL, str(e),
-                                 elapsed=elapsed, detail=e.detail,
-                                 logfile=filename, sandbox=e.sandbox)
+                    self.message(
+                        MessageType.FAIL, str(e), elapsed=elapsed, detail=e.detail, logfile=filename, sandbox=e.sandbox
+                    )
 
                 self._send_message(_MessageType.CHILD_DATA, self.child_process_data())
 
@@ -764,7 +751,7 @@ class ChildJob():
                 #
                 self._child_shutdown(_ReturnCode.FAIL if retry_flag else _ReturnCode.PERM_FAIL)
 
-            except Exception:                        # pylint: disable=broad-except
+            except Exception:  # pylint: disable=broad-except
 
                 # If an unhandled exception (not normalized to BstError) occurs, that's a bug,
                 # send the traceback and formatted exception back to the frontend
@@ -773,9 +760,7 @@ class ChildJob():
                 elapsed = datetime.datetime.now() - starttime
                 detail = "An unhandled exception occured:\n\n{}".format(traceback.format_exc())
 
-                self.message(MessageType.BUG, self.action_name,
-                             elapsed=elapsed, detail=detail,
-                             logfile=filename)
+                self.message(MessageType.BUG, self.action_name, elapsed=elapsed, detail=detail, logfile=filename)
                 # Unhandled exceptions should permanently fail
                 self._child_shutdown(_ReturnCode.PERM_FAIL)
 
@@ -785,8 +770,7 @@ class ChildJob():
                 self._child_send_result(result)
 
                 elapsed = datetime.datetime.now() - starttime
-                self.message(MessageType.SUCCESS, self.action_name, elapsed=elapsed,
-                             logfile=filename)
+                self.message(MessageType.SUCCESS, self.action_name, elapsed=elapsed, logfile=filename)
 
                 # Shutdown needs to stay outside of the above context manager,
                 # make sure we don't try to handle SIGTERM while the process
@@ -825,10 +809,7 @@ class ChildJob():
             domain = e.domain
             reason = e.reason
 
-        self._send_message(_MessageType.ERROR, {
-            'domain': domain,
-            'reason': reason
-        })
+        self._send_message(_MessageType.ERROR, {"domain": domain, "reason": reason})
 
     # _child_send_result()
     #
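
The parent/child plumbing above relies on the small _Envelope wrapper so the parent can dispatch on a message type tag. A self-contained sketch of that pattern (a plain string tag and a bare multiprocessing.Queue replace the real _MessageType enum and job wiring):

    import multiprocessing

    class _Envelope:
        def __init__(self, message_type, message):
            self.message_type = message_type
            self.message = message

    queue = multiprocessing.Queue()
    queue.put(_Envelope("result", {"status": "ok"}))

    envelope = queue.get()
    print(envelope.message_type, envelope.message)  # result {'status': 'ok'}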
diff --git a/src/buildstream/_scheduler/jobs/jobpickler.py b/src/buildstream/_scheduler/jobs/jobpickler.py
index b0465ec..1d47f67 100644
--- a/src/buildstream/_scheduler/jobs/jobpickler.py
+++ b/src/buildstream/_scheduler/jobs/jobpickler.py
@@ -37,9 +37,7 @@ _NAME_TO_PROTO_CLASS = {
     "digest": DigestProto,
 }
 
-_PROTO_CLASS_TO_NAME = {
-    cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()
-}
+_PROTO_CLASS_TO_NAME = {cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()}
 
 
 # pickle_child_job()
@@ -57,10 +55,7 @@ def pickle_child_job(child_job, projects):
     # necessary for the job, this includes e.g. the global state of the node
     # module.
     node_module_state = node._get_state_for_pickling()
-    return _pickle_child_job_data(
-        (child_job, node_module_state),
-        projects,
-    )
+    return _pickle_child_job_data((child_job, node_module_state), projects,)
 
 
 # do_pickled_child_job()
@@ -146,10 +141,7 @@ def _pickle_child_job_data(child_job_data, projects):
     ]
 
     plugin_class_to_factory = {
-        cls: factory
-        for factory in factory_list
-        if factory is not None
-        for cls, _ in factory.all_loaded_plugins()
+        cls: factory for factory in factory_list if factory is not None for cls, _ in factory.all_loaded_plugins()
     }
 
     pickled_data = io.BytesIO()
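
The reformatted mapping above keeps a single forward table of proto names to classes and derives the reverse lookup from it, so the two can never drift apart. The same idiom in isolation (DigestProto stubbed here in place of the real protobuf class):

    class DigestProto:  # stand-in for the real protobuf message class
        pass

    _NAME_TO_PROTO_CLASS = {"digest": DigestProto}
    _PROTO_CLASS_TO_NAME = {cls: name for name, cls in _NAME_TO_PROTO_CLASS.items()}

    print(_PROTO_CLASS_TO_NAME[DigestProto])  # digest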
diff --git a/src/buildstream/_scheduler/queues/buildqueue.py b/src/buildstream/_scheduler/queues/buildqueue.py
index dc33e65..d98b494 100644
--- a/src/buildstream/_scheduler/queues/buildqueue.py
+++ b/src/buildstream/_scheduler/queues/buildqueue.py
@@ -50,10 +50,15 @@ class BuildQueue(Queue):
             self._tried.add(element)
             _, description, detail = element._get_build_result()
             logfile = element._get_build_log()
-            self._message(element, MessageType.FAIL, description,
-                          detail=detail, action_name=self.action_name,
-                          elapsed=timedelta(seconds=0),
-                          logfile=logfile)
+            self._message(
+                element,
+                MessageType.FAIL,
+                description,
+                detail=detail,
+                action_name=self.action_name,
+                elapsed=timedelta(seconds=0),
+                logfile=logfile,
+            )
             self._done_queue.append(element)
             element_name = element._get_full_name()
             self._task_group.add_failed_task(element_name)
diff --git a/src/buildstream/_scheduler/queues/queue.py b/src/buildstream/_scheduler/queues/queue.py
index 49fae56..986ac6c 100644
--- a/src/buildstream/_scheduler/queues/queue.py
+++ b/src/buildstream/_scheduler/queues/queue.py
@@ -57,11 +57,11 @@ class QueueStatus(FastEnum):
 # Args:
 #    scheduler (Scheduler): The Scheduler
 #
-class Queue():
+class Queue:
 
     # These should be overridden on class data of concrete Queue implementations
-    action_name = None      # type: Optional[str]
-    complete_name = None    # type: Optional[str]
+    action_name = None  # type: Optional[str]
+    complete_name = None  # type: Optional[str]
     # Resources this queue's jobs want
     resources = []  # type: List[int]
 
@@ -72,11 +72,11 @@ class Queue():
         #
         self._scheduler = scheduler
         self._resources = scheduler.resources  # Shared resource pool
-        self._ready_queue = []                 # Ready elements
-        self._done_queue = deque()             # Processed / Skipped elements
+        self._ready_queue = []  # Ready elements
+        self._done_queue = deque()  # Processed / Skipped elements
         self._max_retries = 0
 
-        self._required_element_check = False   # Whether we should check that elements are required before enqueuing
+        self._required_element_check = False  # Whether we should check that elements are required before enqueuing
 
         # Assert the subclass has setup class data
         assert self.action_name is not None
@@ -162,8 +162,7 @@ class Queue():
     #    element (Element): The element waiting to be pushed into the queue
     #
     def register_pending_element(self, element):
-        raise ImplError("Queue type: {} does not implement register_pending_element()"
-                        .format(self.action_name))
+        raise ImplError("Queue type: {} does not implement register_pending_element()".format(self.action_name))
 
     #####################################################
     #          Scheduler / Pipeline facing APIs         #
@@ -229,12 +228,16 @@ class Queue():
             ready.append(element)
 
         return [
-            ElementJob(self._scheduler, self.action_name,
-                       self._element_log_path(element),
-                       element=element, queue=self,
-                       action_cb=self.get_process_func(),
-                       complete_cb=self._job_done,
-                       max_retries=self._max_retries)
+            ElementJob(
+                self._scheduler,
+                self.action_name,
+                self._element_log_path(element),
+                element=element,
+                queue=self,
+                action_cb=self.get_process_func(),
+                complete_cb=self._job_done,
+                max_retries=self._max_retries,
+            )
             for element in ready
         ]
 
@@ -267,7 +270,7 @@ class Queue():
     def _update_workspaces(self, element, job):
         workspace_dict = None
         if job.child_data:
-            workspace_dict = job.child_data.get('workspace', None)
+            workspace_dict = job.child_data.get("workspace", None)
 
         # Handle any workspace modifications now
         #
@@ -279,10 +282,13 @@ class Queue():
                     workspaces.save_config()
                 except BstError as e:
                     self._message(element, MessageType.ERROR, "Error saving workspaces", detail=str(e))
-                except Exception:   # pylint: disable=broad-except
-                    self._message(element, MessageType.BUG,
-                                  "Unhandled exception while saving workspaces",
-                                  detail=traceback.format_exc())
+                except Exception:  # pylint: disable=broad-except
+                    self._message(
+                        element,
+                        MessageType.BUG,
+                        "Unhandled exception while saving workspaces",
+                        detail=traceback.format_exc(),
+                    )
 
     # _job_done()
     #
@@ -322,13 +328,13 @@ class Queue():
             #
             set_last_task_error(e.domain, e.reason)
 
-        except Exception:   # pylint: disable=broad-except
+        except Exception:  # pylint: disable=broad-except
 
             # Report unhandled exceptions and mark as failed
             #
-            self._message(element, MessageType.BUG,
-                          "Unhandled exception in post processing",
-                          detail=traceback.format_exc())
+            self._message(
+                element, MessageType.BUG, "Unhandled exception in post processing", detail=traceback.format_exc()
+            )
             self._task_group.add_failed_task(element._get_full_name())
         else:
             # All elements get placed on the done queue for later processing.
@@ -372,7 +378,7 @@ class Queue():
         if status == QueueStatus.SKIP:
             # Place skipped elements into the done queue immediately
             self._task_group.add_skipped_task()
-            self._done_queue.append(element)   # Elements to proceed to the next queue
+            self._done_queue.append(element)  # Elements to proceed to the next queue
         elif status == QueueStatus.READY:
             # Push elements which are ready to be processed immediately into the queue
             heapq.heappush(self._ready_queue, (element._depth, element))
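
The ready queue above is a heap keyed on element depth, so entries with the smallest depth are popped first. A minimal illustration of that ordering (Element is a hypothetical namedtuple, not the real class):

    import heapq
    from collections import namedtuple

    Element = namedtuple("Element", "depth name")

    ready_queue = []
    heapq.heappush(ready_queue, (3, Element(3, "app.bst")))
    heapq.heappush(ready_queue, (1, Element(1, "base.bst")))

    # heapq orders tuples by their first item, so the shallowest element comes out first
    print(heapq.heappop(ready_queue)[1].name)  # base.bst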
diff --git a/src/buildstream/_scheduler/resources.py b/src/buildstream/_scheduler/resources.py
index 73bf66b..e761587 100644
--- a/src/buildstream/_scheduler/resources.py
+++ b/src/buildstream/_scheduler/resources.py
@@ -1,17 +1,17 @@
-class ResourceType():
+class ResourceType:
     CACHE = 0
     DOWNLOAD = 1
     PROCESS = 2
     UPLOAD = 3
 
 
-class Resources():
+class Resources:
     def __init__(self, num_builders, num_fetchers, num_pushers):
         self._max_resources = {
             ResourceType.CACHE: 0,
             ResourceType.DOWNLOAD: num_fetchers,
             ResourceType.PROCESS: num_builders,
-            ResourceType.UPLOAD: num_pushers
+            ResourceType.UPLOAD: num_pushers,
         }
 
         # Resources jobs are currently using.
@@ -19,7 +19,7 @@ class Resources():
             ResourceType.CACHE: 0,
             ResourceType.DOWNLOAD: 0,
             ResourceType.PROCESS: 0,
-            ResourceType.UPLOAD: 0
+            ResourceType.UPLOAD: 0,
         }
 
         # Resources jobs currently want exclusive access to. The set
@@ -31,7 +31,7 @@ class Resources():
             ResourceType.CACHE: set(),
             ResourceType.DOWNLOAD: set(),
             ResourceType.PROCESS: set(),
-            ResourceType.UPLOAD: set()
+            ResourceType.UPLOAD: set(),
         }
 
     # reserve()
@@ -90,8 +90,7 @@ class Resources():
         # available. If we don't have enough, the job cannot be
         # scheduled.
         for resource in resources:
-            if (self._max_resources[resource] > 0 and
-                    self._used_resources[resource] >= self._max_resources[resource]):
+            if self._max_resources[resource] > 0 and self._used_resources[resource] >= self._max_resources[resource]:
                 return False
 
         # Now we register the fact that our job is using the resources
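
In the condition reformatted above, a maximum of zero effectively means unlimited: the cap is only enforced when the configured maximum is positive. A standalone sketch of that check (string keys and the function name are illustrative; the real code uses ResourceType integer constants on a Resources instance):

    def can_reserve(max_resources, used_resources, resources):
        for resource in resources:
            # A positive maximum caps usage; zero disables the cap entirely
            if max_resources[resource] > 0 and used_resources[resource] >= max_resources[resource]:
                return False
        return True

    print(can_reserve({"process": 2}, {"process": 2}, ["process"]))   # False, pool exhausted
    print(can_reserve({"process": 0}, {"process": 99}, ["process"]))  # True, zero means unlimited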
diff --git a/src/buildstream/_scheduler/scheduler.py b/src/buildstream/_scheduler/scheduler.py
index 7ef5c5f..8f44751 100644
--- a/src/buildstream/_scheduler/scheduler.py
+++ b/src/buildstream/_scheduler/scheduler.py
@@ -73,17 +73,18 @@ class NotificationType(FastEnum):
 # required. NOTE: The notification object should be lightweight
 # and all attributes must be picklable.
 #
-class Notification():
-
-    def __init__(self,
-                 notification_type,
-                 *,
-                 full_name=None,
-                 job_action=None,
-                 job_status=None,
-                 time=None,
-                 element=None,
-                 message=None):
+class Notification:
+    def __init__(
+        self,
+        notification_type,
+        *,
+        full_name=None,
+        job_action=None,
+        job_status=None,
+        time=None,
+        element=None,
+        message=None
+    ):
         self.notification_type = notification_type
         self.full_name = full_name
         self.job_action = job_action
@@ -113,40 +114,36 @@ class Notification():
 #    interrupt_callback: A callback to handle ^C
 #    ticker_callback: A callback called once per second
 #
-class Scheduler():
-
-    def __init__(self, context,
-                 start_time, state, notification_queue, notifier):
+class Scheduler:
+    def __init__(self, context, start_time, state, notification_queue, notifier):
 
         #
         # Public members
         #
-        self.queues = None          # Exposed for the frontend to print summaries
-        self.context = context      # The Context object shared with Queues
-        self.terminated = False     # Whether the scheduler was asked to terminate or has terminated
-        self.suspended = False      # Whether the scheduler is currently suspended
+        self.queues = None  # Exposed for the frontend to print summaries
+        self.context = context  # The Context object shared with Queues
+        self.terminated = False  # Whether the scheduler was asked to terminate or has terminated
+        self.suspended = False  # Whether the scheduler is currently suspended
 
         # These are shared with the Job, but should probably be removed or made private in some way.
-        self.loop = None            # Shared for Job access to observe the message queue
-        self.internal_stops = 0     # Amount of SIGSTP signals we've introduced, this is shared with job.py
+        self.loop = None  # Shared for Job access to observe the message queue
+        self.internal_stops = 0  # Number of SIGTSTP signals we've introduced, this is shared with job.py
 
         #
         # Private members
         #
-        self._active_jobs = []                # Jobs currently being run in the scheduler
-        self._starttime = start_time          # Initial application start time
-        self._suspendtime = None              # Session time compensation for suspended state
-        self._queue_jobs = True               # Whether we should continue to queue jobs
+        self._active_jobs = []  # Jobs currently being run in the scheduler
+        self._starttime = start_time  # Initial application start time
+        self._suspendtime = None  # Session time compensation for suspended state
+        self._queue_jobs = True  # Whether we should continue to queue jobs
         self._state = state
-        self._casd_process = None             # handle to the casd process for monitoring purpose
+        self._casd_process = None  # handle to the casd process for monitoring purpose
 
         # Bidirectional queue to send notifications back to the Scheduler's owner
         self._notification_queue = notification_queue
         self._notifier = notifier
 
-        self.resources = Resources(context.sched_builders,
-                                   context.sched_fetchers,
-                                   context.sched_pushers)
+        self.resources = Resources(context.sched_builders, context.sched_fetchers, context.sched_pushers)
 
     # run()
     #
@@ -307,11 +304,13 @@ class Scheduler():
                 element_info = None
 
         # Now check for more jobs
-        notification = Notification(NotificationType.JOB_COMPLETE,
-                                    full_name=job.name,
-                                    job_action=job.action_name,
-                                    job_status=status,
-                                    element=element_info)
+        notification = Notification(
+            NotificationType.JOB_COMPLETE,
+            full_name=job.name,
+            job_action=job.action_name,
+            job_status=status,
+            element=element_info,
+        )
         self._notify(notification)
         self._sched()
 
@@ -357,10 +356,12 @@ class Scheduler():
     #
     def _start_job(self, job):
         self._active_jobs.append(job)
-        notification = Notification(NotificationType.JOB_START,
-                                    full_name=job.name,
-                                    job_action=job.action_name,
-                                    time=self._state.elapsed_time(start_time=self._starttime))
+        notification = Notification(
+            NotificationType.JOB_START,
+            full_name=job.name,
+            job_action=job.action_name,
+            time=self._state.elapsed_time(start_time=self._starttime),
+        )
         self._notify(notification)
         job.start()
 
@@ -396,9 +397,7 @@ class Scheduler():
             # to fetch tasks for elements which failed to pull, and
             # thus need all the pulls to complete before ever starting
             # a build
-            ready.extend(chain.from_iterable(
-                q.harvest_jobs() for q in reversed(self.queues)
-            ))
+            ready.extend(chain.from_iterable(q.harvest_jobs() for q in reversed(self.queues)))
 
             # harvest_jobs() may have decided to skip some jobs, making
             # them eligible for promotion to the next queue as a side effect.
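As the comments above explain, the queues are harvested in reverse order so that later-stage queues (e.g. build) are offered resources before earlier ones (e.g. pull). A toy sketch of that traversal, using a hypothetical queue class rather than BuildStream's real Queue, shows the effect of reversed() combined with chain.from_iterable():

    from itertools import chain

    class ToyQueue:
        def __init__(self, name, jobs):
            self.name = name
            self.jobs = jobs

        def harvest_jobs(self):
            # Hand over everything currently ready and clear the queue
            ready, self.jobs = self.jobs, []
            return ready

    queues = [ToyQueue("pull", ["p1"]), ToyQueue("build", ["b1"])]
    # Reversed: the build queue harvests before the pull queue
    ready = list(chain.from_iterable(q.harvest_jobs() for q in reversed(queues)))
    assert ready == ["b1", "p1"]
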
@@ -468,7 +467,7 @@ class Scheduler():
             self.suspended = False
             # Notify that we're unsuspended
             self._notify(Notification(NotificationType.SUSPENDED))
-            self._starttime += (datetime.datetime.now() - self._suspendtime)
+            self._starttime += datetime.datetime.now() - self._suspendtime
             self._notify(Notification(NotificationType.SCHED_START_TIME, time=self._starttime))
             self._suspendtime = None
 
diff --git a/src/buildstream/_signals.py b/src/buildstream/_signals.py
index 31982c1..425a572 100644
--- a/src/buildstream/_signals.py
+++ b/src/buildstream/_signals.py
@@ -37,8 +37,8 @@ if TYPE_CHECKING:
 # typing.MutableSequence. However, that is only available in Python versions
 # 3.5.4 onward and 3.6.1 onward.
 # Debian 9 ships with 3.5.3.
-terminator_stack = deque()      # type: MutableSequence[Callable]
-suspendable_stack = deque()     # type: MutableSequence[Callable]
+terminator_stack = deque()  # type: MutableSequence[Callable]
+suspendable_stack = deque()  # type: MutableSequence[Callable]
 
 
 # Per process SIGTERM handler
@@ -47,16 +47,18 @@ def terminator_handler(signal_, frame):
         terminator_ = terminator_stack.pop()
         try:
             terminator_()
-        except:                               # noqa pylint: disable=bare-except
+        except:  # noqa pylint: disable=bare-except
             # Ensure we print something if there's an exception raised when
             # processing the handlers. Note that the default exception
             # handler won't be called because we os._exit next, so we must
             # catch all possible exceptions with the unqualified 'except'
             # clause.
             traceback.print_exc(file=sys.stderr)
-            print('Error encountered in BuildStream while processing custom SIGTERM handler:',
-                  terminator_,
-                  file=sys.stderr)
+            print(
+                "Error encountered in BuildStream while processing custom SIGTERM handler:",
+                terminator_,
+                file=sys.stderr,
+            )
 
     # Use special exit here, terminate immediately, recommended
     # for precisely this situation where child processes are terminated.
@@ -79,7 +81,7 @@ def terminator_handler(signal_, frame):
 #
 @contextmanager
 def terminator(terminate_func):
-    global terminator_stack                   # pylint: disable=global-statement
+    global terminator_stack  # pylint: disable=global-statement
 
     # Signal handling only works in the main thread
     if threading.current_thread() != threading.main_thread():
@@ -101,7 +103,7 @@ def terminator(terminate_func):
 
 
 # Just a simple object for holding on to two callbacks
-class Suspender():
+class Suspender:
     def __init__(self, suspend_callback, resume_callback):
         self.suspend = suspend_callback
         self.resume = resume_callback
@@ -144,7 +146,7 @@ def suspend_handler(sig, frame):
 #
 @contextmanager
 def suspendable(suspend_callback, resume_callback):
-    global suspendable_stack                  # pylint: disable=global-statement
+    global suspendable_stack  # pylint: disable=global-statement
 
     outermost = bool(not suspendable_stack)
     suspender = Suspender(suspend_callback, resume_callback)
diff --git a/src/buildstream/_site.py b/src/buildstream/_site.py
index 8940fa3..db05871 100644
--- a/src/buildstream/_site.py
+++ b/src/buildstream/_site.py
@@ -30,22 +30,22 @@ import subprocess
 root = os.path.dirname(os.path.abspath(__file__))
 
 # The Element plugin directory
-element_plugins = os.path.join(root, 'plugins', 'elements')
+element_plugins = os.path.join(root, "plugins", "elements")
 
 # The Source plugin directory
-source_plugins = os.path.join(root, 'plugins', 'sources')
+source_plugins = os.path.join(root, "plugins", "sources")
 
 # Default user configuration
-default_user_config = os.path.join(root, 'data', 'userconfig.yaml')
+default_user_config = os.path.join(root, "data", "userconfig.yaml")
 
 # Default project configuration
-default_project_config = os.path.join(root, 'data', 'projectconfig.yaml')
+default_project_config = os.path.join(root, "data", "projectconfig.yaml")
 
 # Script template to call module building scripts
-build_all_template = os.path.join(root, 'data', 'build-all.sh.in')
+build_all_template = os.path.join(root, "data", "build-all.sh.in")
 
 # Module building script template
-build_module_template = os.path.join(root, 'data', 'build-module.sh.in')
+build_module_template = os.path.join(root, "data", "build-module.sh.in")
 
 
 def get_bwrap_version():
@@ -53,7 +53,7 @@ def get_bwrap_version():
     #
     # returns None if no bwrap was found
     # otherwise returns a tuple of 3 ints: major, minor, patch
-    bwrap_path = shutil.which('bwrap')
+    bwrap_path = shutil.which("bwrap")
 
     if not bwrap_path:
         return None
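The hunk ends before the parsing itself. A minimal sketch of the remaining steps, assuming bwrap --version prints output like "bubblewrap 0.4.1" (an illustration of the documented None-or-three-ints contract, not the exact code in _site.py):

    import shutil
    import subprocess

    def get_bwrap_version_sketch():
        bwrap_path = shutil.which("bwrap")
        if not bwrap_path:
            return None  # no bwrap found
        output = subprocess.check_output([bwrap_path, "--version"], universal_newlines=True)
        # Take the final token ("0.4.1") and split it into (major, minor, patch)
        version = output.strip().split()[-1]
        return tuple(int(part) for part in version.split("."))
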
diff --git a/src/buildstream/_sourcecache.py b/src/buildstream/_sourcecache.py
index 28ad828..03e2d18 100644
--- a/src/buildstream/_sourcecache.py
+++ b/src/buildstream/_sourcecache.py
@@ -26,12 +26,10 @@ from .storage._casbaseddirectory import CasBasedDirectory
 from ._basecache import BaseCache
 from ._exceptions import CASError, CASRemoteError, SourceCacheError, RemoteError
 from . import utils
-from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, \
-    source_pb2, source_pb2_grpc
+from ._protos.buildstream.v2 import buildstream_pb2, buildstream_pb2_grpc, source_pb2, source_pb2_grpc
 
 
 class SourceRemote(BaseRemote):
-
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
... 39321 lines suppressed ...