Posted to commits@buildstream.apache.org by tv...@apache.org on 2021/01/26 02:34:02 UTC

[buildstream] 01/01: _project.py, _context.py: Load remote specs with RemoteSpec.new_from_node()

This is an automated email from the ASF dual-hosted git repository.

tvb pushed a commit to branch tristan/change-remote-config
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 36a5157ab75260021cb932dfcdcb0b55076b8dd1
Author: Tristan van Berkom <tr...@codethink.co.uk>
AuthorDate: Sun Jan 24 13:45:15 2021 +0900

    _project.py, _context.py: Load remote specs with RemoteSpec.new_from_node()
    
    Instead of going through an additional helper function on the AssetCache class.
    
    This is intended to help delegate ownership of configuration parsing to
    the project and context, and take this out of the hands of asset caches.
    
    Additional breaking change:
    
       We no longer support a dictionary for the artifact caches or source
       caches; this was previously supported for the case where one wanted
       to specify only a single cache.
    
       Rationale: it is clearer that the value is a list if we force it to
       be a list. This feature could have been convenient, except that it
       adds no convenience at all: specifying a list with a single element
       requires no additional YAML compared to specifying the dictionary
       directly (only one space becomes a '-' instead of a ' ').
    
       Seeing as it wasn't buying any convenience, it seems pointless to keep
       this around.
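
       For illustration, both forms carry the same information, and the
       only syntactic difference per remote is a '-' replacing one space
       of indentation (the url/push keys are the ones used in the tests
       below; the URL is a placeholder):

           # Old form (no longer supported): a single mapping
           artifacts:
             url: https://cache.example.com/
             push: true

           # New form: a list of mappings, even for a single remote
           artifacts:
           - url: https://cache.example.com/
             push: true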
    
    Summary of changes:
    
      * _project.py, _context.py: Parse the remote specs directly instead of delegating
                                  to the asset caches, which have no business parsing
                                  user/project configuration anyway.
    
      * _assetcache.py: Update remaining parsing code.
    
      * tests: Update test cases to ensure we always specify caches as lists.
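
    A rough sketch of the new loading pattern, mirroring the _context.py
    and _project.py hunks below (the wrapper function and its name are
    illustrative, not part of this change; MappingNode and RemoteSpec are
    BuildStream-internal modules on this branch):

        from buildstream.node import MappingNode
        from buildstream._remotespec import RemoteSpec

        def _load_remote_specs(config, key, basedir=None):
            # 'config' is a loaded MappingNode; a missing key yields an
            # empty sequence, and non-mapping entries are rejected by
            # allowed_types before any spec is constructed.
            specs = []
            for node in config.get_sequence(key, default=[], allowed_types=[MappingNode]):
                specs.append(RemoteSpec.new_from_node(node, basedir))
            return specs

        # Usage, e.g. in Project._load():
        #   self.artifact_cache_specs = _load_remote_specs(config, "artifacts", self.directory)
        #   self.source_cache_specs = _load_remote_specs(config, "source-caches", self.directory)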
---
 src/buildstream/_assetcache.py       | 19 ++----------
 src/buildstream/_context.py          | 19 ++++++++----
 src/buildstream/_project.py          | 28 ++++++++++--------
 tests/artifactcache/capabilities.py  |  2 +-
 tests/artifactcache/config.py        | 57 ++++++++++--------------------------
 tests/artifactcache/junctions.py     |  2 +-
 tests/artifactcache/pull.py          |  4 +--
 tests/artifactcache/push.py          |  4 +--
 tests/frontend/artifact_checkout.py  |  2 +-
 tests/frontend/artifact_delete.py    |  2 +-
 tests/frontend/artifact_pull.py      |  2 +-
 tests/frontend/artifact_show.py      |  2 +-
 tests/frontend/buildcheckout.py      |  6 ++--
 tests/frontend/default_target.py     |  2 +-
 tests/frontend/pull.py               | 26 ++++++++--------
 tests/frontend/push.py               | 40 +++++++++++--------------
 tests/frontend/remote-caches.py      |  8 ++---
 tests/frontend/workspace.py          |  2 +-
 tests/integration/artifact.py        |  6 ++--
 tests/integration/cachedfail.py      |  4 +--
 tests/integration/pullbuildtrees.py  | 12 ++++----
 tests/integration/shell.py           |  2 +-
 tests/integration/shellbuildtrees.py | 10 +++----
 tests/remotecache/simple.py          |  2 +-
 tests/remoteexecution/buildtree.py   |  2 +-
 tests/remoteexecution/partial.py     |  5 +---
 tests/sourcecache/capabilities.py    |  2 +-
 tests/sourcecache/fetch.py           |  2 +-
 tests/sourcecache/push.py            | 10 +++----
 tests/sourcecache/workspace.py       |  6 +++-
 30 files changed, 128 insertions(+), 162 deletions(-)

diff --git a/src/buildstream/_assetcache.py b/src/buildstream/_assetcache.py
index 71c9cb6..eff9de9 100644
--- a/src/buildstream/_assetcache.py
+++ b/src/buildstream/_assetcache.py
@@ -23,9 +23,9 @@ from typing import TYPE_CHECKING
 import grpc
 
 from . import utils
-from . import _yaml
+from .node import MappingNode
 from ._cas import CASRemote
-from ._exceptions import AssetCacheError, LoadError, RemoteError
+from ._exceptions import AssetCacheError, RemoteError
 from ._remotespec import RemoteSpec, RemoteType
 from ._remote import BaseRemote
 from ._protos.build.bazel.remote.asset.v1 import remote_asset_pb2, remote_asset_pb2_grpc
@@ -317,20 +317,7 @@ class AssetCache:
     @classmethod
     def specs_from_config_node(cls, config_node, basedir=None):
         cache_specs = []
-
-        try:
-            artifacts = [config_node.get_mapping(cls.config_node_name)]
-        except LoadError:
-            try:
-                artifacts = config_node.get_sequence(cls.config_node_name, default=[])
-            except LoadError:
-                provenance = config_node.get_node(cls.config_node_name).get_provenance()
-                raise _yaml.LoadError(
-                    "{}: '{}' must be a single remote mapping, or a list of mappings".format(
-                        provenance, cls.config_node_name
-                    ),
-                    _yaml.LoadErrorReason.INVALID_DATA,
-                )
+        artifacts = config_node.get_sequence(cls.config_node_name, default=[], allowed_types=[MappingNode])
 
         for spec_node in artifacts:
             cache_specs.append(RemoteSpec.new_from_node(spec_node))
diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index bae97ab..e5df670 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -29,12 +29,12 @@ from ._profile import Topics, PROFILER
 from ._platform import Platform
 from ._artifactcache import ArtifactCache
 from ._elementsourcescache import ElementSourcesCache
-from ._remotespec import RemoteExecutionSpec
+from ._remotespec import RemoteSpec, RemoteExecutionSpec
 from ._sourcecache import SourceCache
 from ._cas import CASCache, CASLogLevel
 from .types import _CacheBuildTrees, _PipelineSelection, _SchedulerErrorAction
 from ._workspaces import Workspaces, WorkspaceProjectCache
-from .node import Node
+from .node import Node, MappingNode
 
 
 # Context()
@@ -327,11 +327,18 @@ class Context:
                 LoadErrorReason.INVALID_DATA,
             ) from e
 
-        # Load artifact share configuration
-        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(defaults)
 
-        # Load source cache config
-        self.source_cache_specs = SourceCache.specs_from_config_node(defaults)
+        # Load artifact remote specs
+        caches = config.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
+        for node in caches:
+            spec = RemoteSpec.new_from_node(node)
+            self.artifact_cache_specs.append(spec)
+
+        # Load source cache remote specs
+        caches = config.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
+        for node in caches:
+            spec = RemoteSpec.new_from_node(node)
+            self.source_cache_specs.append(spec)
 
         # Load the global remote execution config including pull-artifact-files setting
         remote_execution = defaults.get_mapping("remote-execution", default=None)
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 6dbc120..3206450 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -33,9 +33,7 @@ from ._profile import Topics, PROFILER
 from ._exceptions import LoadError
 from .exceptions import LoadErrorReason
 from ._options import OptionPool
-from ._artifactcache import ArtifactCache
-from ._sourcecache import SourceCache
-from .node import ScalarNode, SequenceNode, _assert_symbol_name
+from .node import ScalarNode, SequenceNode, MappingNode, _assert_symbol_name
 from ._pluginfactory import ElementFactory, SourceFactory, load_plugin_origin
 from .types import CoreWarnings
 from ._projectrefs import ProjectRefs, ProjectRefStorage
@@ -47,7 +45,7 @@ from ._remotespec import RemoteSpec
 
 
 if TYPE_CHECKING:
-    from .node import ProvenanceInformation, MappingNode
+    from .node import ProvenanceInformation
     from ._context import Context
 
 
@@ -135,7 +133,7 @@ class Project:
         self.config: ProjectConfig = ProjectConfig()
         self.first_pass_config: ProjectConfig = ProjectConfig()
 
-        self.base_environment: Union["MappingNode", Dict[str, str]] = {}  # The base set of environment variables
+        self.base_environment: Union[MappingNode, Dict[str, str]] = {}  # The base set of environment variables
         self.base_env_nocache: List[str] = []  # The base nocache mask (list) for the environment
 
         # Remote specs for communicating with remote services
@@ -145,8 +143,8 @@ class Project:
         self.element_factory: Optional[ElementFactory] = None  # ElementFactory for loading elements
         self.source_factory: Optional[SourceFactory] = None  # SourceFactory for loading sources
 
-        self.sandbox: Optional["MappingNode"] = None
-        self.splits: Optional["MappingNode"] = None
+        self.sandbox: Optional[MappingNode] = None
+        self.splits: Optional[MappingNode] = None
 
         #
         # Private members
@@ -864,11 +862,17 @@ class Project:
         # the values from our loaded configuration dictionary.
         #
 
-        # Load artifacts pull/push configuration for this project
-        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
-
-        # Load source caches with pull/push config
-        self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
+        # Load artifact remote specs
+        caches = config.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
+        for node in caches:
+            spec = RemoteSpec.new_from_node(node, self.directory)
+            self.artifact_cache_specs.append(spec)
+
+        # Load source cache remote specs
+        caches = config.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
+        for node in caches:
+            spec = RemoteSpec.new_from_node(node, self.directory)
+            self.source_cache_specs.append(spec)
 
         # Load sandbox environment variables
         self.base_environment = config.get_mapping("environment")
diff --git a/tests/artifactcache/capabilities.py b/tests/artifactcache/capabilities.py
index c8a49f9..0f2f1aa 100644
--- a/tests/artifactcache/capabilities.py
+++ b/tests/artifactcache/capabilities.py
@@ -27,7 +27,7 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 45682ca..1b8cbad 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -41,28 +41,12 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
         user_caches = []
 
     user_config = {}
-    if len(user_caches) == 1:
-        user_config["artifacts"] = {
-            "url": user_caches[0].url,
-            "push": user_caches[0].push,
-            "type": type_strings[user_caches[0].remote_type],
-        }
-    elif len(user_caches) > 1:
+    if user_caches:
         user_config["artifacts"] = [
             {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]} for cache in user_caches
         ]
 
-    if len(override_caches) == 1:
-        user_config["projects"] = {
-            "test": {
-                "artifacts": {
-                    "url": override_caches[0].url,
-                    "push": override_caches[0].push,
-                    "type": type_strings[override_caches[0].remote_type],
-                }
-            }
-        }
-    elif len(override_caches) > 1:
+    if override_caches:
         user_config["projects"] = {
             "test": {
                 "artifacts": [
@@ -74,25 +58,14 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
 
     project_config = {}
     if project_caches:
-        if len(project_caches) == 1:
-            project_config.update(
-                {
-                    "artifacts": {
-                        "url": project_caches[0].url,
-                        "push": project_caches[0].push,
-                        "type": type_strings[project_caches[0].remote_type],
-                    }
-                }
-            )
-        elif len(project_caches) > 1:
-            project_config.update(
-                {
-                    "artifacts": [
-                        {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
-                        for cache in project_caches
-                    ]
-                }
-            )
+        project_config.update(
+            {
+                "artifacts": [
+                    {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
+                    for cache in project_caches
+                ]
+            }
+        )
 
     return user_config, project_config
 
@@ -199,10 +172,12 @@ def test_only_one(cli, datafiles, override_caches, project_caches, user_caches):
 @pytest.mark.parametrize(
     "artifacts_config",
     (
-        {
-            "url": "http://localhost.test",
-            "auth": {"server-cert": "~/server.crt", "client-cert": "~/client.crt", "client-key": "~/client.key",},
-        },
+        [
+            {
+                "url": "http://localhost.test",
+                "auth": {"server-cert": "~/server.crt", "client-cert": "~/client.crt", "client-key": "~/client.key",},
+            }
+        ],
         [
             {
                 "url": "http://localhost.test",
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index e1b7dbf..c180de4 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -17,7 +17,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "junctions"
 def project_set_artifacts(project, url):
     project_conf_file = os.path.join(project, "project.conf")
     project_config = _yaml.load(project_conf_file, shortname=None)
-    project_config["artifacts"] = {"url": url, "push": True}
+    project_config["artifacts"] = [{"url": url, "push": True}]
     _yaml.roundtrip_dump(project_config.strip_node_info(), file=project_conf_file)
 
 
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 63e6d98..fc27e75 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -41,7 +41,7 @@ def test_pull(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
 
@@ -106,7 +106,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 74062ce..02ff259 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -67,7 +67,7 @@ def test_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": rootcache_dir,
         }
 
@@ -124,7 +124,7 @@ def test_push_message(tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/frontend/artifact_checkout.py b/tests/frontend/artifact_checkout.py
index 1375bc3..329d2df 100644
--- a/tests/frontend/artifact_checkout.py
+++ b/tests/frontend/artifact_checkout.py
@@ -39,7 +39,7 @@ def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist, wi
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Build the element to push it to cache
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Build it
         result = cli.run(project=project, args=["build", "target-import.bst"])
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 37b9731..90d448b 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -152,7 +152,7 @@ def test_artifact_delete_pulled_artifact_without_buildtree(cli, tmpdir, datafile
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
         cli.configure(
-            {"artifacts": {"url": remote.repo, "push": True}, "cachedir": local_cache,}
+            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
         )
 
         # Build the element
diff --git a/tests/frontend/artifact_pull.py b/tests/frontend/artifact_pull.py
index 4fa6b19..926f96e 100644
--- a/tests/frontend/artifact_pull.py
+++ b/tests/frontend/artifact_pull.py
@@ -41,7 +41,7 @@ def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
         # Build the element to push it to cache, and explicitly configure local cache so we can check it
         local_cache = os.path.join(str(tmpdir), "cache")
         cli.configure(
-            {"cachedir": local_cache, "artifacts": {"url": share.repo, "push": True},}
+            {"cachedir": local_cache, "artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Build it
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index 2a7131c..7e8f8ee 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -184,7 +184,7 @@ def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
         cli.configure(
-            {"artifacts": {"url": remote.repo, "push": True}, "cachedir": local_cache,}
+            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
         )
 
         # Build the element
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 6d11906..3a1d650 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -124,7 +124,7 @@ def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo}})
+        cli.configure({"artifacts": [{"url": share.repo}]})
 
         # First build it in non-strict mode with an artifact server configured.
         # With this configuration BuildStream will attempt to pull the build-only
@@ -1089,7 +1089,7 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -1124,7 +1124,7 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         res = cli.run(project=project, args=["artifact", "checkout", "--pull", build_elt, "--directory", checkout_dir])
         res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
diff --git a/tests/frontend/default_target.py b/tests/frontend/default_target.py
index 60578bb..f6573c6 100644
--- a/tests/frontend/default_target.py
+++ b/tests/frontend/default_target.py
@@ -177,7 +177,7 @@ def test_default_target_push_pull(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Push the artifacts
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["artifact", "push"])
         result.assert_success()
 
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index f873ad9..b721ca8 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -46,7 +46,7 @@ def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
@@ -137,9 +137,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
 
         # Configure the default push location to be bad_share; we will assert that
         # nothing actually gets pushed there.
-        cli.configure(
-            {"artifacts": {"url": bad_share.repo, "push": True},}
-        )
+        cli.configure({"artifacts": [{"url": bad_share.repo, "push": True},]})
 
         # Now try `bst artifact push` to the good_share.
         result = cli.run(project=project, args=["artifact", "push", "target.bst", "--remote", good_share.repo])
@@ -175,7 +173,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "target.bst") == "cached"
@@ -225,7 +223,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
         generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
@@ -290,7 +288,7 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
     project = str(datafiles)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         _test_pull_missing_blob(cli, project, share, share)
 
@@ -345,7 +343,7 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the import-bin element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -376,7 +374,7 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo}})
+        cli.configure({"artifacts": [{"url": share.repo}]})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -397,10 +395,10 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
 
         # Add shareproject repo url to project.conf
         with open(os.path.join(project, "project.conf"), "a") as projconf:
-            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))
+            projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
 
         # Push the artifacts to the shareuser and shareproject remotes.
         # Assert that shareuser and shareproject have the artfifacts cached,
@@ -457,7 +455,7 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", "compose-all.bst"])
         result.assert_success()
 
@@ -519,7 +517,7 @@ def test_pull_artifact(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["build", element])
         result.assert_success()
@@ -557,7 +555,7 @@ def test_dynamic_build_plan(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 4e39c22..8f71c8a 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -71,9 +71,7 @@ def test_push(cli, tmpdir, datafiles):
 
             # Configure bst to pull but not push from a cache and run `bst artifact push`.
             # This should also fail.
-            cli.configure(
-                {"artifacts": {"url": share1.repo, "push": False},}
-            )
+            cli.configure({"artifacts": [{"url": share1.repo, "push": False},]})
             result = cli.run(project=project, args=["artifact", "push", "target.bst"])
             result.assert_main_error(ErrorDomain.STREAM, None)
 
@@ -127,7 +125,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": {"url": share.repo, "push": True,},
+                "artifacts": [{"url": share.repo, "push": True,}],
             }
         )
 
@@ -163,7 +161,7 @@ def test_push_artifact_glob(cli, tmpdir, datafiles):
         assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
 
         # Configure artifact share
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Run bst artifact push with a wildcard, there is only one artifact
         # matching "test/target/*", even though it can be accessed both by it's
@@ -282,7 +280,7 @@ def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": {"url": share.repo, "push": True,},
+                "artifacts": [{"url": share.repo, "push": True,}],
             }
         )
 
@@ -331,7 +329,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": {"url": share.repo, "push": True,},
+                "artifacts": [{"url": share.repo, "push": True,}],
             }
         )
 
@@ -354,9 +352,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 
         # Set the scene: share1 has the artifact, share2 does not.
         #
-        cli.configure(
-            {"artifacts": {"url": share1.repo, "push": True},}
-        )
+        cli.configure({"artifacts": [{"url": share1.repo, "push": True},]})
 
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
@@ -400,9 +396,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
 
         # Configure bst to push to the cache
-        cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
-        )
+        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
 
         # Create and build an element of 15 MB
         create_element_size("element1.bst", project, element_path, [], int(15e6))
@@ -453,7 +447,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
 
         # Configure bst to push to the remote cache
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Create and push a 3MB element
@@ -506,7 +500,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
 
         # Configure bst to push to the cache
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Create and build 2 elements, one 5 MB and one 15 MB.
@@ -570,7 +564,7 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
         cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
 
@@ -585,7 +579,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -610,10 +604,10 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
 
         # Add shareproject repo url to project.conf
         with open(os.path.join(project, "project.conf"), "a") as projconf:
-            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))
+            projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
 
         result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
 
@@ -640,7 +634,7 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
     caplog.set_level(1)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
 
         # First get us a build
         result = cli.run(project=project, args=["build", "target.bst"])
@@ -699,7 +693,7 @@ def test_push_after_rebuild(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, "random.bst") != "cached"
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Now rebuild the element and push it
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -724,7 +718,7 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
     )
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Build the element and push the artifact
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -743,6 +737,6 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, "random.bst") == "cached"
 
         # Push the new build
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["artifact", "push", "random.bst"])
         assert result.get_pushed_elements() == ["random.bst"]
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index ebafddf..03d728d 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -45,8 +45,8 @@ def test_source_artifact_caches(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
-            "artifacts": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -80,8 +80,8 @@ def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
-            "artifacts": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 813e681..3a8bc22 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -1030,7 +1030,7 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
         result = cli.run(project=project, args=["-C", workspace, "build", element_name])
         result.assert_success()
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
         result.assert_success()
diff --git a/tests/integration/artifact.py b/tests/integration/artifact.py
index f7d62a5..faefa8f 100644
--- a/tests/integration/artifact.py
+++ b/tests/integration/artifact.py
@@ -55,7 +55,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "share1")) as share1, create_artifact_share(
         os.path.join(str(tmpdir), "share2")
     ) as share2, create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3:
-        cli.configure({"artifacts": {"url": share1.repo, "push": True}, "cachedir": str(tmpdir)})
+        cli.configure({"artifacts": [{"url": share1.repo, "push": True}], "cachedir": str(tmpdir)})
 
         # Build autotools element with the default behavior of caching buildtrees
         # only when necessary. The artifact should be successfully pushed to the share1 remote
@@ -98,7 +98,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
 
         # Repeat building the artifacts, this time with cache-buildtrees set to
         # 'always' via the cli, as such the buildtree dir should not be empty
-        cli.configure({"artifacts": {"url": share2.repo, "push": True}, "cachedir": str(tmpdir)})
+        cli.configure({"artifacts": [{"url": share2.repo, "push": True}], "cachedir": str(tmpdir)})
         result = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
         assert result.exit_code == 0
         assert cli.get_element_state(project, element_name) == "cached"
@@ -127,7 +127,7 @@ def test_cache_buildtrees(cli, tmpdir, datafiles):
         # a build
         cli.configure(
             {
-                "artifacts": {"url": share3.repo, "push": True},
+                "artifacts": [{"url": share3.repo, "push": True}],
                 "cachedir": str(tmpdir),
                 "cache": {"cache-buildtrees": "always"},
             }
diff --git a/tests/integration/cachedfail.py b/tests/integration/cachedfail.py
index f72c315..3b59199 100644
--- a/tests/integration/cachedfail.py
+++ b/tests/integration/cachedfail.py
@@ -126,7 +126,7 @@ def test_push_cached_fail(cli, tmpdir, datafiles, on_error):
 
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Build the element, continuing to finish active jobs on error.
@@ -169,7 +169,7 @@ def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
 
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Build the element, continuing to finish active jobs on error.
diff --git a/tests/integration/pullbuildtrees.py b/tests/integration/pullbuildtrees.py
index 5923ce5..eeeb7a5 100644
--- a/tests/integration/pullbuildtrees.py
+++ b/tests/integration/pullbuildtrees.py
@@ -26,7 +26,7 @@ def default_state(cli, tmpdir, share):
     shutil.rmtree(os.path.join(str(tmpdir), "cas"))
     cli.configure(
         {
-            "artifacts": {"url": share.repo, "push": False},
+            "artifacts": [{"url": share.repo, "push": False}],
             "cachedir": str(tmpdir),
             "cache": {"pull-buildtrees": False},
         }
@@ -50,7 +50,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
     ) as share2, create_artifact_share(os.path.join(str(tmpdir), "share3")) as share3:
         cli2.configure(
             {
-                "artifacts": {"url": share1.repo, "push": True},
+                "artifacts": [{"url": share1.repo, "push": True}],
                 "cachedir": str(tmpdir),
                 "cache": {"cache-buildtrees": "always"},
             }
@@ -117,7 +117,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
         # to share2
         result = cli2.run(project=project, args=["artifact", "pull", element_name])
         assert element_name in result.get_pulled_elements()
-        cli2.configure({"artifacts": {"url": share2.repo, "push": True}})
+        cli2.configure({"artifacts": [{"url": share2.repo, "push": True}]})
         result = cli2.run(project=project, args=["artifact", "push", element_name])
         assert element_name not in result.get_pushed_elements()
         assert not share2.get_artifact(cli2.get_artifact_name(project, "test", element_name))
@@ -125,10 +125,10 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
         # Assert that after pulling the missing buildtree the element artifact can be
         # successfully pushed to the remote. This will attempt to pull the buildtree
         # from share1 and then a 'complete' push to share2
-        cli2.configure({"artifacts": {"url": share1.repo, "push": False}})
+        cli2.configure({"artifacts": [{"url": share1.repo, "push": False}]})
         result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "pull", element_name])
         assert element_name in result.get_pulled_elements()
-        cli2.configure({"artifacts": {"url": share2.repo, "push": True}})
+        cli2.configure({"artifacts": [{"url": share2.repo, "push": True}]})
         result = cli2.run(project=project, args=["artifact", "push", element_name])
         assert element_name in result.get_pushed_elements()
         assert share2.get_artifact(cli2.get_artifact_name(project, "test", element_name))
@@ -140,7 +140,7 @@ def test_pullbuildtrees(cli2, tmpdir, datafiles):
         # artifact cannot be pushed.
         result = cli2.run(project=project, args=["artifact", "pull", element_name])
         assert element_name in result.get_pulled_elements()
-        cli2.configure({"artifacts": {"url": share3.repo, "push": True}})
+        cli2.configure({"artifacts": [{"url": share3.repo, "push": True}]})
         result = cli2.run(project=project, args=["--pull-buildtrees", "artifact", "push", element_name])
         assert element_name not in result.get_pulled_elements()
         with cli2.artifact.extract_buildtree(cwd, cwd, artifact_name) as buildtreedir:
diff --git a/tests/integration/shell.py b/tests/integration/shell.py
index a022d86..308b398 100644
--- a/tests/integration/shell.py
+++ b/tests/integration/shell.py
@@ -359,7 +359,7 @@ def test_integration_partial_artifact(cli, datafiles, tmpdir, integration_cache)
 
     # push to an artifact server so we can pull from it later.
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", element_name])
         result.assert_success()
 
diff --git a/tests/integration/shellbuildtrees.py b/tests/integration/shellbuildtrees.py
index 47ca9f6..cd38839 100644
--- a/tests/integration/shellbuildtrees.py
+++ b/tests/integration/shellbuildtrees.py
@@ -130,7 +130,7 @@ def create_built_artifact_share(tmpdir, cache_buildtrees, integration_cache):
 
     # Create a Cli instance to build and populate the share
     cli = Cli(os.path.join(tmpdir, "cache"))
-    cli.configure({"artifacts": {"url": share.repo, "push": True}, "sourcedir": integration_cache.sources})
+    cli.configure({"artifacts": [{"url": share.repo, "push": True}], "sourcedir": integration_cache.sources})
 
     # Optionally cache build trees
     args = []
@@ -240,7 +240,7 @@ def test_shell_use_cached_buildtree(share_with_buildtrees, datafiles, cli, pull_
     project = str(datafiles)
     element_name = "build-shell/buildtree.bst"
 
-    cli.configure({"artifacts": {"url": share_with_buildtrees.repo}})
+    cli.configure({"artifacts": [{"url": share_with_buildtrees.repo}]})
 
     # Optionally pull the buildtree along with `bst artifact pull`
     maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree)
@@ -280,7 +280,7 @@ def test_shell_pull_cached_buildtree(share_with_buildtrees, datafiles, cli, pull
     project = str(datafiles)
     element_name = "build-shell/buildtree.bst"
 
-    cli.configure({"artifacts": {"url": share_with_buildtrees.repo}})
+    cli.configure({"artifacts": [{"url": share_with_buildtrees.repo}]})
 
     # Optionally pull the buildtree along with `bst artifact pull`
     maybe_pull_deps(cli, project, element_name, pull_deps, pull_buildtree)
@@ -319,7 +319,7 @@ def test_shell_use_uncached_buildtree(share_without_buildtrees, datafiles, cli):
     project = str(datafiles)
     element_name = "build-shell/buildtree.bst"
 
-    cli.configure({"artifacts": {"url": share_without_buildtrees.repo}})
+    cli.configure({"artifacts": [{"url": share_without_buildtrees.repo}]})
 
     # Pull everything we would need
     maybe_pull_deps(cli, project, element_name, "all", True)
@@ -342,7 +342,7 @@ def test_shell_pull_uncached_buildtree(share_without_buildtrees, datafiles, cli)
     project = str(datafiles)
     element_name = "build-shell/buildtree.bst"
 
-    cli.configure({"artifacts": {"url": share_without_buildtrees.repo}})
+    cli.configure({"artifacts": [{"url": share_without_buildtrees.repo}]})
 
     # Run the shell and request that required artifacts and buildtrees should be pulled
     result = cli.run(
diff --git a/tests/remotecache/simple.py b/tests/remotecache/simple.py
index 74c44ae..4f8896a 100644
--- a/tests/remotecache/simple.py
+++ b/tests/remotecache/simple.py
@@ -57,7 +57,7 @@ def test_remote_autotools_build_no_cache(cli, datafiles):
     checkout = os.path.join(cli.directory, "checkout")
     element_name = "autotools/amhello.bst"
 
-    cli.configure({"artifacts": {"url": "http://fake.url.service", "push": True}})
+    cli.configure({"artifacts": [{"url": "http://fake.url.service", "push": True}]})
     result = cli.run(project=project, args=["build", element_name])
     result.assert_success()
 
diff --git a/tests/remoteexecution/buildtree.py b/tests/remoteexecution/buildtree.py
index 317747f..4586286 100644
--- a/tests/remoteexecution/buildtree.py
+++ b/tests/remoteexecution/buildtree.py
@@ -41,7 +41,7 @@ def test_buildtree_remote(cli, tmpdir, datafiles):
     assert set(services) == set(["action-cache", "execution", "storage"])
 
     with create_artifact_share(share_path) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}, "cache": {"pull-buildtrees": False}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "cache": {"pull-buildtrees": False}})
 
         res = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
         res.assert_success()
diff --git a/tests/remoteexecution/partial.py b/tests/remoteexecution/partial.py
index ec5fabe..7688df4 100644
--- a/tests/remoteexecution/partial.py
+++ b/tests/remoteexecution/partial.py
@@ -70,10 +70,7 @@ def test_build_partial_push(cli, tmpdir, datafiles):
         services = cli.ensure_services()
         assert set(services) == set(["action-cache", "execution", "storage"])
 
-        cli.config["artifacts"] = {
-            "url": share.repo,
-            "push": True,
-        }
+        cli.config["artifacts"] = [{"url": share.repo, "push": True,}]
 
         res = cli.run(project=project, args=["build", element_name])
         res.assert_success()
diff --git a/tests/sourcecache/capabilities.py b/tests/sourcecache/capabilities.py
index 9d41eba..964b458 100644
--- a/tests/sourcecache/capabilities.py
+++ b/tests/sourcecache/capabilities.py
@@ -25,7 +25,7 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         # Configure artifact share
         cache_dir = os.path.join(str(tmpdir), "cache")
         user_config_file = str(tmpdir.join("buildstream.conf"))
-        user_config = {"scheduler": {"pushers": 1}, "source-caches": {"url": share.repo,}, "cachedir": cache_dir}
+        user_config = {"scheduler": {"pushers": 1}, "source-caches": [{"url": share.repo,}], "cachedir": cache_dir}
         _yaml.roundtrip_dump(user_config, file=user_config_file)
 
         with dummy_context(config=user_config_file) as context:
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 76f5508..7b2c63a 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -51,7 +51,7 @@ def context_with_source_cache(cli, cache, share, tmpdir):
     user_config_file = str(tmpdir.join("buildstream.conf"))
     user_config = {
         "scheduler": {"pushers": 1},
-        "source-caches": {"url": share.repo,},
+        "source-caches": [{"url": share.repo,}],
         "cachedir": cache,
     }
     _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 25a4309..bc591ad 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -115,7 +115,7 @@ def test_source_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -165,7 +165,7 @@ def test_push_pull(cli, datafiles, tmpdir):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -203,7 +203,7 @@ def test_push_fail(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -234,7 +234,7 @@ def test_source_push_build_fail(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         cli.configure(user_config)
@@ -275,7 +275,7 @@ def test_push_missing_source_after_build(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/workspace.py b/tests/sourcecache/workspace.py
index bb1ea50..7dccbe0 100644
--- a/tests/sourcecache/workspace.py
+++ b/tests/sourcecache/workspace.py
@@ -72,7 +72,11 @@ def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
 
     with create_artifact_share(share_dir) as share:
         cli.configure(
-            {"cachedir": cache_dir, "scheduler": {"pushers": 1}, "source-caches": {"url": share.repo, "push": True,},}
+            {
+                "cachedir": cache_dir,
+                "scheduler": {"pushers": 1},
+                "source-caches": [{"url": share.repo, "push": True,}],
+            }
         )
 
         # Fetch as in previous test and check it pushes the source