Posted to commits@buildstream.apache.org by tv...@apache.org on 2021/01/24 13:37:13 UTC

[buildstream] 06/06: _project.py: Load remote specs with RemoteSpec.new_from_node()

This is an automated email from the ASF dual-hosted git repository.

tvb pushed a commit to branch tristan/change-remote-config
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 5efe22bbebd5e2ddd2ee1bcc2e296ab42d0bba7f
Author: Tristan van Berkom <tr...@codethink.co.uk>
AuthorDate: Sun Jan 24 13:45:15 2021 +0900

    _project.py: Load remote specs with RemoteSpec.new_from_node()
    
    Instead of using an additional helper function on the AssetCache class.
    
    This is intended to help delegate ownership of configuration parsing to
    the project and context, taking it out of the hands of the asset caches.
    
    Additional breaking change:
    
       We no longer accept a single dictionary for the artifact cache or
       source cache configuration; this was previously supported as a
       shorthand for specifying a single cache.
    
       Rationale: it is clearer that the value is a list if we require it
       to be a list. The shorthand could only be justified as a convenience,
       but it adds no convenience at all: a single-element list requires no
       additional YAML compared to the bare dictionary (one space simply
       becomes a '-'), as illustrated below.
    
       Since the shorthand wasn't buying any convenience, there is no point
       in keeping it around.
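    
       For illustration, a minimal project.conf sketch (with a hypothetical
       URL) showing the old single-mapping form next to the now-required
       list form:
    
          # No longer accepted: a bare mapping
          artifacts:
            url: https://cache.example.com/artifacts
            push: True
    
          # Required: a list of mappings
          artifacts:
          - url: https://cache.example.com/artifacts
            push: True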
    
    Summary of changes:
    
      * _project.py: Parse the remote specs directly instead of delegating
                     to the artifact caches, which have no business parsing
                     user/project configuration anyway (see the sketch after
                     this list).
    
      * _assetcache.py: Update remaining parsing code.
    
      * tests: Update test cases to ensure we always specify caches as lists.
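    
      For reference, the new loading pattern in _project.py condenses to the
      following sketch (the real change is in the hunk below; surrounding
      context omitted):
    
         # Parse "artifacts" as a list of mappings, each becoming a RemoteSpec
         # (MappingNode and RemoteSpec are imported at the top of _project.py)
         caches = config.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
         for node in caches:
             spec = RemoteSpec.new_from_node(node, self.directory)
             self.artifact_cache_specs.append(spec)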
---
 src/buildstream/_assetcache.py      | 16 ++---------
 src/buildstream/_project.py         | 14 ++++++---
 tests/artifactcache/capabilities.py |  2 +-
 tests/artifactcache/config.py       | 57 +++++++++++--------------------------
 tests/artifactcache/junctions.py    |  2 +-
 tests/artifactcache/pull.py         |  4 +--
 tests/artifactcache/push.py         |  4 +--
 tests/frontend/artifact_checkout.py |  2 +-
 tests/frontend/artifact_delete.py   |  2 +-
 tests/frontend/artifact_pull.py     |  2 +-
 tests/frontend/artifact_show.py     |  2 +-
 tests/frontend/buildcheckout.py     |  6 ++--
 tests/frontend/default_target.py    |  2 +-
 tests/frontend/pull.py              | 26 ++++++++---------
 tests/frontend/push.py              | 40 +++++++++++---------------
 tests/frontend/remote-caches.py     |  8 +++---
 tests/frontend/workspace.py         |  2 +-
 tests/remotecache/simple.py         |  2 +-
 tests/remoteexecution/buildtree.py  |  2 +-
 tests/remoteexecution/partial.py    |  5 +---
 tests/sourcecache/capabilities.py   |  2 +-
 tests/sourcecache/fetch.py          |  2 +-
 tests/sourcecache/push.py           | 10 +++----
 tests/sourcecache/workspace.py      |  6 +++-
 24 files changed, 91 insertions(+), 129 deletions(-)

diff --git a/src/buildstream/_assetcache.py b/src/buildstream/_assetcache.py
index 71c9cb6..aaea040 100644
--- a/src/buildstream/_assetcache.py
+++ b/src/buildstream/_assetcache.py
@@ -24,6 +24,7 @@ import grpc
 
 from . import utils
 from . import _yaml
+from .node import MappingNode
 from ._cas import CASRemote
 from ._exceptions import AssetCacheError, LoadError, RemoteError
 from ._remotespec import RemoteSpec, RemoteType
@@ -317,20 +318,7 @@ class AssetCache:
     @classmethod
     def specs_from_config_node(cls, config_node, basedir=None):
         cache_specs = []
-
-        try:
-            artifacts = [config_node.get_mapping(cls.config_node_name)]
-        except LoadError:
-            try:
-                artifacts = config_node.get_sequence(cls.config_node_name, default=[])
-            except LoadError:
-                provenance = config_node.get_node(cls.config_node_name).get_provenance()
-                raise _yaml.LoadError(
-                    "{}: '{}' must be a single remote mapping, or a list of mappings".format(
-                        provenance, cls.config_node_name
-                    ),
-                    _yaml.LoadErrorReason.INVALID_DATA,
-                )
+        artifacts = config_node.get_sequence(cls.config_node_name, default=[], allowed_types=[MappingNode])
 
         for spec_node in artifacts:
             cache_specs.append(RemoteSpec.new_from_node(spec_node))
diff --git a/src/buildstream/_project.py b/src/buildstream/_project.py
index 6dbc120..2f2effb 100644
--- a/src/buildstream/_project.py
+++ b/src/buildstream/_project.py
@@ -35,7 +35,7 @@ from .exceptions import LoadErrorReason
 from ._options import OptionPool
 from ._artifactcache import ArtifactCache
 from ._sourcecache import SourceCache
-from .node import ScalarNode, SequenceNode, _assert_symbol_name
+from .node import ScalarNode, SequenceNode, MappingNode, _assert_symbol_name
 from ._pluginfactory import ElementFactory, SourceFactory, load_plugin_origin
 from .types import CoreWarnings
 from ._projectrefs import ProjectRefs, ProjectRefStorage
@@ -864,11 +864,17 @@ class Project:
         # the values from our loaded configuration dictionary.
         #
 
-        # Load artifacts pull/push configuration for this project
-        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config, self.directory)
+        # Load artifact remote specs
+        caches = config.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
+        for node in caches:
+            spec = RemoteSpec.new_from_node(node, self.directory)
+            self.artifact_cache_specs.append(spec)
 
         # Load source caches with pull/push config
-        self.source_cache_specs = SourceCache.specs_from_config_node(config, self.directory)
+        caches = config.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
+        for node in caches:
+            spec = RemoteSpec.new_from_node(node, self.directory)
+            self.source_cache_specs.append(spec)
 
         # Load sandbox environment variables
         self.base_environment = config.get_mapping("environment")
diff --git a/tests/artifactcache/capabilities.py b/tests/artifactcache/capabilities.py
index c8a49f9..0f2f1aa 100644
--- a/tests/artifactcache/capabilities.py
+++ b/tests/artifactcache/capabilities.py
@@ -27,7 +27,7 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 45682ca..1b8cbad 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -41,28 +41,12 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
         user_caches = []
 
     user_config = {}
-    if len(user_caches) == 1:
-        user_config["artifacts"] = {
-            "url": user_caches[0].url,
-            "push": user_caches[0].push,
-            "type": type_strings[user_caches[0].remote_type],
-        }
-    elif len(user_caches) > 1:
+    if user_caches:
         user_config["artifacts"] = [
             {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]} for cache in user_caches
         ]
 
-    if len(override_caches) == 1:
-        user_config["projects"] = {
-            "test": {
-                "artifacts": {
-                    "url": override_caches[0].url,
-                    "push": override_caches[0].push,
-                    "type": type_strings[override_caches[0].remote_type],
-                }
-            }
-        }
-    elif len(override_caches) > 1:
+    if override_caches:
         user_config["projects"] = {
             "test": {
                 "artifacts": [
@@ -74,25 +58,14 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
 
     project_config = {}
     if project_caches:
-        if len(project_caches) == 1:
-            project_config.update(
-                {
-                    "artifacts": {
-                        "url": project_caches[0].url,
-                        "push": project_caches[0].push,
-                        "type": type_strings[project_caches[0].remote_type],
-                    }
-                }
-            )
-        elif len(project_caches) > 1:
-            project_config.update(
-                {
-                    "artifacts": [
-                        {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
-                        for cache in project_caches
-                    ]
-                }
-            )
+        project_config.update(
+            {
+                "artifacts": [
+                    {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
+                    for cache in project_caches
+                ]
+            }
+        )
 
     return user_config, project_config
 
@@ -199,10 +172,12 @@ def test_only_one(cli, datafiles, override_caches, project_caches, user_caches):
 @pytest.mark.parametrize(
     "artifacts_config",
     (
-        {
-            "url": "http://localhost.test",
-            "auth": {"server-cert": "~/server.crt", "client-cert": "~/client.crt", "client-key": "~/client.key",},
-        },
+        [
+            {
+                "url": "http://localhost.test",
+                "auth": {"server-cert": "~/server.crt", "client-cert": "~/client.crt", "client-key": "~/client.key",},
+            }
+        ],
         [
             {
                 "url": "http://localhost.test",
diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index e1b7dbf..c180de4 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -17,7 +17,7 @@ DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "junctions"
 def project_set_artifacts(project, url):
     project_conf_file = os.path.join(project, "project.conf")
     project_config = _yaml.load(project_conf_file, shortname=None)
-    project_config["artifacts"] = {"url": url, "push": True}
+    project_config["artifacts"] = [{"url": url, "push": True}]
     _yaml.roundtrip_dump(project_config.strip_node_info(), file=project_conf_file)
 
 
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 63e6d98..fc27e75 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -41,7 +41,7 @@ def test_pull(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
 
@@ -106,7 +106,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 74062ce..02ff259 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -67,7 +67,7 @@ def test_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": rootcache_dir,
         }
 
@@ -124,7 +124,7 @@ def test_push_message(tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": {"url": share.repo, "push": True,},
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/frontend/artifact_checkout.py b/tests/frontend/artifact_checkout.py
index 1375bc3..329d2df 100644
--- a/tests/frontend/artifact_checkout.py
+++ b/tests/frontend/artifact_checkout.py
@@ -39,7 +39,7 @@ def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist, wi
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Build the element to push it to cache
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Build it
         result = cli.run(project=project, args=["build", "target-import.bst"])
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 37b9731..90d448b 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -152,7 +152,7 @@ def test_artifact_delete_pulled_artifact_without_buildtree(cli, tmpdir, datafile
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
         cli.configure(
-            {"artifacts": {"url": remote.repo, "push": True}, "cachedir": local_cache,}
+            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
         )
 
         # Build the element
diff --git a/tests/frontend/artifact_pull.py b/tests/frontend/artifact_pull.py
index 4fa6b19..926f96e 100644
--- a/tests/frontend/artifact_pull.py
+++ b/tests/frontend/artifact_pull.py
@@ -41,7 +41,7 @@ def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
         # Build the element to push it to cache, and explicitly configure local cache so we can check it
         local_cache = os.path.join(str(tmpdir), "cache")
         cli.configure(
-            {"cachedir": local_cache, "artifacts": {"url": share.repo, "push": True},}
+            {"cachedir": local_cache, "artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Build it
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index 2a7131c..7e8f8ee 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -184,7 +184,7 @@ def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
         cli.configure(
-            {"artifacts": {"url": remote.repo, "push": True}, "cachedir": local_cache,}
+            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
         )
 
         # Build the element
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 6d11906..3a1d650 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -124,7 +124,7 @@ def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo}})
+        cli.configure({"artifacts": [{"url": share.repo}]})
 
         # First build it in non-strict mode with an artifact server configured.
         # With this configuration BuildStream will attempt to pull the build-only
@@ -1089,7 +1089,7 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -1124,7 +1124,7 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         res = cli.run(project=project, args=["artifact", "checkout", "--pull", build_elt, "--directory", checkout_dir])
         res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
diff --git a/tests/frontend/default_target.py b/tests/frontend/default_target.py
index 60578bb..f6573c6 100644
--- a/tests/frontend/default_target.py
+++ b/tests/frontend/default_target.py
@@ -177,7 +177,7 @@ def test_default_target_push_pull(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Push the artifacts
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["artifact", "push"])
         result.assert_success()
 
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index f873ad9..b721ca8 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -46,7 +46,7 @@ def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
@@ -137,9 +137,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
 
         # Configure the default push location to be bad_share; we will assert that
         # nothing actually gets pushed there.
-        cli.configure(
-            {"artifacts": {"url": bad_share.repo, "push": True},}
-        )
+        cli.configure({"artifacts": [{"url": bad_share.repo, "push": True},]})
 
         # Now try `bst artifact push` to the good_share.
         result = cli.run(project=project, args=["artifact", "push", "target.bst", "--remote", good_share.repo])
@@ -175,7 +173,7 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "target.bst") == "cached"
@@ -225,7 +223,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
         generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
@@ -290,7 +288,7 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
     project = str(datafiles)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         _test_pull_missing_blob(cli, project, share, share)
 
@@ -345,7 +343,7 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the import-bin element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -376,7 +374,7 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo}})
+        cli.configure({"artifacts": [{"url": share.repo}]})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -397,10 +395,10 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
 
         # Add shareproject repo url to project.conf
         with open(os.path.join(project, "project.conf"), "a") as projconf:
-            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))
+            projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
 
         # Push the artifacts to the shareuser and shareproject remotes.
         # Assert that shareuser and shareproject have the artfifacts cached,
@@ -457,7 +455,7 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", "compose-all.bst"])
         result.assert_success()
 
@@ -519,7 +517,7 @@ def test_pull_artifact(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["build", element])
         result.assert_success()
@@ -557,7 +555,7 @@ def test_dynamic_build_plan(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 4e39c22..8f71c8a 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -71,9 +71,7 @@ def test_push(cli, tmpdir, datafiles):
 
             # Configure bst to pull but not push from a cache and run `bst artifact push`.
             # This should also fail.
-            cli.configure(
-                {"artifacts": {"url": share1.repo, "push": False},}
-            )
+            cli.configure({"artifacts": [{"url": share1.repo, "push": False},]})
             result = cli.run(project=project, args=["artifact", "push", "target.bst"])
             result.assert_main_error(ErrorDomain.STREAM, None)
 
@@ -127,7 +125,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": {"url": share.repo, "push": True,},
+                "artifacts": [{"url": share.repo, "push": True,}],
             }
         )
 
@@ -163,7 +161,7 @@ def test_push_artifact_glob(cli, tmpdir, datafiles):
         assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
 
         # Configure artifact share
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Run bst artifact push with a wildcard, there is only one artifact
         # matching "test/target/*", even though it can be accessed both by it's
@@ -282,7 +280,7 @@ def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": {"url": share.repo, "push": True,},
+                "artifacts": [{"url": share.repo, "push": True,}],
             }
         )
 
@@ -331,7 +329,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": {"url": share.repo, "push": True,},
+                "artifacts": [{"url": share.repo, "push": True,}],
             }
         )
 
@@ -354,9 +352,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 
         # Set the scene: share1 has the artifact, share2 does not.
         #
-        cli.configure(
-            {"artifacts": {"url": share1.repo, "push": True},}
-        )
+        cli.configure({"artifacts": [{"url": share1.repo, "push": True},]})
 
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
@@ -400,9 +396,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
 
         # Configure bst to push to the cache
-        cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
-        )
+        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
 
         # Create and build an element of 15 MB
         create_element_size("element1.bst", project, element_path, [], int(15e6))
@@ -453,7 +447,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
 
         # Configure bst to push to the remote cache
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Create and push a 3MB element
@@ -506,7 +500,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
 
         # Configure bst to push to the cache
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
 
         # Create and build 2 elements, one 5 MB and one 15 MB.
@@ -570,7 +564,7 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         cli.configure(
-            {"artifacts": {"url": share.repo, "push": True},}
+            {"artifacts": [{"url": share.repo, "push": True}],}
         )
         cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
 
@@ -585,7 +579,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -610,10 +604,10 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
 
         # Add shareproject repo url to project.conf
         with open(os.path.join(project, "project.conf"), "a") as projconf:
-            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))
+            projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
 
         result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
 
@@ -640,7 +634,7 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
     caplog.set_level(1)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}, "projects": {"test": {"strict": False}}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
 
         # First get us a build
         result = cli.run(project=project, args=["build", "target.bst"])
@@ -699,7 +693,7 @@ def test_push_after_rebuild(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, "random.bst") != "cached"
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Now rebuild the element and push it
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -724,7 +718,7 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
     )
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         # Build the element and push the artifact
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -743,6 +737,6 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, "random.bst") == "cached"
 
         # Push the new build
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
         result = cli.run(project=project, args=["artifact", "push", "random.bst"])
         assert result.get_pushed_elements() == ["random.bst"]
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index ebafddf..03d728d 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -45,8 +45,8 @@ def test_source_artifact_caches(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
-            "artifacts": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -80,8 +80,8 @@ def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
-            "artifacts": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
+            "artifacts": [{"url": share.repo, "push": True,}],
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 813e681..3a8bc22 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -1030,7 +1030,7 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
         result = cli.run(project=project, args=["-C", workspace, "build", element_name])
         result.assert_success()
 
-        cli.configure({"artifacts": {"url": share.repo, "push": True}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
 
         result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
         result.assert_success()
diff --git a/tests/remotecache/simple.py b/tests/remotecache/simple.py
index 74c44ae..4f8896a 100644
--- a/tests/remotecache/simple.py
+++ b/tests/remotecache/simple.py
@@ -57,7 +57,7 @@ def test_remote_autotools_build_no_cache(cli, datafiles):
     checkout = os.path.join(cli.directory, "checkout")
     element_name = "autotools/amhello.bst"
 
-    cli.configure({"artifacts": {"url": "http://fake.url.service", "push": True}})
+    cli.configure({"artifacts": [{"url": "http://fake.url.service", "push": True}]})
     result = cli.run(project=project, args=["build", element_name])
     result.assert_success()
 
diff --git a/tests/remoteexecution/buildtree.py b/tests/remoteexecution/buildtree.py
index 317747f..4586286 100644
--- a/tests/remoteexecution/buildtree.py
+++ b/tests/remoteexecution/buildtree.py
@@ -41,7 +41,7 @@ def test_buildtree_remote(cli, tmpdir, datafiles):
     assert set(services) == set(["action-cache", "execution", "storage"])
 
     with create_artifact_share(share_path) as share:
-        cli.configure({"artifacts": {"url": share.repo, "push": True}, "cache": {"pull-buildtrees": False}})
+        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "cache": {"pull-buildtrees": False}})
 
         res = cli.run(project=project, args=["--cache-buildtrees", "always", "build", element_name])
         res.assert_success()
diff --git a/tests/remoteexecution/partial.py b/tests/remoteexecution/partial.py
index ec5fabe..7688df4 100644
--- a/tests/remoteexecution/partial.py
+++ b/tests/remoteexecution/partial.py
@@ -70,10 +70,7 @@ def test_build_partial_push(cli, tmpdir, datafiles):
         services = cli.ensure_services()
         assert set(services) == set(["action-cache", "execution", "storage"])
 
-        cli.config["artifacts"] = {
-            "url": share.repo,
-            "push": True,
-        }
+        cli.config["artifacts"] = [{"url": share.repo, "push": True,}]
 
         res = cli.run(project=project, args=["build", element_name])
         res.assert_success()
diff --git a/tests/sourcecache/capabilities.py b/tests/sourcecache/capabilities.py
index 9d41eba..964b458 100644
--- a/tests/sourcecache/capabilities.py
+++ b/tests/sourcecache/capabilities.py
@@ -25,7 +25,7 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         # Configure artifact share
         cache_dir = os.path.join(str(tmpdir), "cache")
         user_config_file = str(tmpdir.join("buildstream.conf"))
-        user_config = {"scheduler": {"pushers": 1}, "source-caches": {"url": share.repo,}, "cachedir": cache_dir}
+        user_config = {"scheduler": {"pushers": 1}, "source-caches": [{"url": share.repo,}], "cachedir": cache_dir}
         _yaml.roundtrip_dump(user_config, file=user_config_file)
 
         with dummy_context(config=user_config_file) as context:
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 76f5508..7b2c63a 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -51,7 +51,7 @@ def context_with_source_cache(cli, cache, share, tmpdir):
     user_config_file = str(tmpdir.join("buildstream.conf"))
     user_config = {
         "scheduler": {"pushers": 1},
-        "source-caches": {"url": share.repo,},
+        "source-caches": [{"url": share.repo,}],
         "cachedir": cache,
     }
     _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index 25a4309..bc591ad 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -115,7 +115,7 @@ def test_source_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -165,7 +165,7 @@ def test_push_pull(cli, datafiles, tmpdir):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -203,7 +203,7 @@ def test_push_fail(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -234,7 +234,7 @@ def test_source_push_build_fail(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         cli.configure(user_config)
@@ -275,7 +275,7 @@ def test_push_missing_source_after_build(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": {"url": share.repo, "push": True,},
+            "source-caches": [{"url": share.repo, "push": True,}],
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/workspace.py b/tests/sourcecache/workspace.py
index bb1ea50..7dccbe0 100644
--- a/tests/sourcecache/workspace.py
+++ b/tests/sourcecache/workspace.py
@@ -72,7 +72,11 @@ def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
 
     with create_artifact_share(share_dir) as share:
         cli.configure(
-            {"cachedir": cache_dir, "scheduler": {"pushers": 1}, "source-caches": {"url": share.repo, "push": True,},}
+            {
+                "cachedir": cache_dir,
+                "scheduler": {"pushers": 1},
+                "source-caches": [{"url": share.repo, "push": True,}],
+            }
         )
 
         # Fetch as in previous test and check it pushes the source