Posted to commits@buildstream.apache.org by tv...@apache.org on 2021/01/28 07:43:24 UTC

[buildstream] 02/02: _context.py: Changed artifact and source cache configuration again.

This is an automated email from the ASF dual-hosted git repository.

tvb pushed a commit to branch tristan/change-remote-config
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit c0194c7f9066df21df2c2eb5ffc0c94b00f9e934
Author: Tristan van Berkom <tr...@codethink.co.uk>
AuthorDate: Thu Jan 28 16:21:39 2021 +0900

    _context.py: Changed artifact and source cache configuration again.
    
    Added a new "servers" sublist and turned the "artifacts" and
    "source-caches" configurations into dictionaries.
    
    This allows for adding the "override-project-caches" boolean attribute,
    which decides whether this configuration overrides the recommendations
    made by project data.
    
    Also updated all tests to provide the new "servers" sublist, as sketched below.
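    
    As a sketch of the new shape, using the test suite's cli.configure()
    form (the URL here is hypothetical):
    
        # Old format: a plain list of server dictionaries
        cli.configure({"artifacts": [{"url": "https://cache.example.com", "push": True}]})
    
        # New format: a dictionary with a "servers" sublist and the
        # optional "override-project-caches" boolean
        cli.configure(
            {
                "artifacts": {
                    "override-project-caches": False,
                    "servers": [{"url": "https://cache.example.com", "push": True}],
                }
            }
        )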
---
 src/buildstream/_context.py         | 97 ++++++++++++++++++++++++-------------
 src/buildstream/testing/runcli.py   |  4 +-
 tests/artifactcache/capabilities.py |  2 +-
 tests/artifactcache/config.py       | 24 +++++----
 tests/artifactcache/pull.py         |  4 +-
 tests/artifactcache/push.py         | 14 +++---
 tests/frontend/artifact_checkout.py |  2 +-
 tests/frontend/artifact_delete.py   |  4 +-
 tests/frontend/artifact_pull.py     |  4 +-
 tests/frontend/artifact_show.py     |  2 +-
 tests/frontend/buildcheckout.py     |  6 +--
 tests/frontend/default_target.py    |  2 +-
 tests/frontend/large_directory.py   |  2 +-
 tests/frontend/pull.py              | 38 +++++++++------
 tests/frontend/push.py              | 56 ++++++++++-----------
 tests/frontend/remote-caches.py     |  8 +--
 tests/frontend/workspace.py         |  2 +-
 tests/remotecache/simple.py         |  2 +-
 tests/sourcecache/capabilities.py   |  6 ++-
 tests/sourcecache/fetch.py          |  2 +-
 tests/sourcecache/push.py           | 20 ++++----
 tests/sourcecache/workspace.py      |  2 +-
 22 files changed, 174 insertions(+), 129 deletions(-)

diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 2caf8f9..2e85b87 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -42,6 +42,7 @@ from .node import Node, MappingNode
 if TYPE_CHECKING:
     # pylint: disable=cyclic-import
     from ._project import Project
+
     # pylint: enable=cyclic-import
 
 
@@ -176,9 +177,9 @@ class Context:
         # Whether elements must be rebuilt when their dependencies have changed
         self._strict_build_plan: Optional[bool] = None
 
-        # Lists of globally configured cache specs
-        self._global_artifact_cache_specs: List[RemoteSpec] = []
-        self._global_source_cache_specs: List[RemoteSpec] = []
+        # Globally configured artifact and source cache configurations
+        self._global_artifact_cache_config: _CacheConfig = _CacheConfig(False, [])
+        self._global_source_cache_config: _CacheConfig = _CacheConfig(False, [])
 
         # Set of all actively configured remote specs
         self._active_artifact_cache_specs: Set[RemoteSpec] = set()
@@ -342,13 +343,13 @@ class Context:
                 LoadErrorReason.INVALID_DATA,
             ) from e
 
-        # Load artifact remote specs
-        caches = defaults.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
-        self._global_artifact_cache_specs = [RemoteSpec.new_from_node(node) for node in caches]
+        # Load global artifact cache configuration
+        cache_config = defaults.get_mapping("artifacts", default={})
+        self._global_artifact_cache_config = _CacheConfig.new_from_node(cache_config)
 
-        # Load source cache remote specs
-        caches = defaults.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
-        self._global_source_cache_specs = [RemoteSpec.new_from_node(node) for node in caches]
+        # Load global source cache configuration
+        cache_config = defaults.get_mapping("source-caches", default={})
+        self._global_source_cache_config = _CacheConfig.new_from_node(cache_config)
 
         # Load the global remote execution config including pull-artifact-files setting
         remote_execution = defaults.get_mapping("remote-execution", default=None)
@@ -517,12 +518,9 @@ class Context:
         project = self.get_toplevel_project()
         if project:
             override_node = self.get_overrides(project.name)
-            if override_node:
-                remote_execution = override_node.get_mapping("remote-execution", default=None)
-                if remote_execution:
-                    self.pull_artifact_files, self.remote_execution_specs = self._load_remote_execution(
-                        remote_execution
-                    )
+            remote_execution = override_node.get_mapping("remote-execution", default=None)
+            if remote_execution:
+                self.pull_artifact_files, self.remote_execution_specs = self._load_remote_execution(remote_execution)
 
         # Collect a table of which specs apply to each project; these
         # are calculated here and handed over to the asset caches.
@@ -534,37 +532,46 @@ class Context:
         cli_source_remotes = [source_remote] if source_remote else []
 
         #
+        # Helper function to resolve which remote specs apply to a given project
+        #
+        def resolve_specs_for_project(
+            project: "Project", global_config: _CacheConfig, override_key: str, project_attribute: str,
+        ) -> List[RemoteSpec]:
+
+            # Obtain the overrides
+            override_node = self.get_overrides(project.name)
+            override_config_node = override_node.get_mapping(override_key, default={})
+            override_config = _CacheConfig.new_from_node(override_config_node)
+            if override_config.override_projects:
+                return override_config.remote_specs
+            elif global_config.override_projects:
+                return global_config.remote_specs
+
+            # If there were no explicit overrides, then take either the project-specific
+            # config or fall back to the global config, and tack the project's recommended
+            # remotes on at the end.
+            #
+            config_specs = override_config.remote_specs or global_config.remote_specs
+            project_specs = getattr(project, project_attribute)
+            all_specs = config_specs + project_specs
+            return list(utils._deduplicate(all_specs))
+
+        #
         # Maintain our list of remote specs for artifact and source caches
         #
         for project in self._projects:
-
             artifact_specs: List[RemoteSpec] = []
             source_specs: List[RemoteSpec] = []
 
-            override_node = self.get_overrides(project.name)
-
-            # Resolve which remote specs to use, CLI -> Override -> Global -> Project recommendation
             if connect_artifact_cache:
-                caches = override_node.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
-                override_artifact_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(node) for node in caches]
-                artifact_specs = (
-                    cli_artifact_remotes
-                    or override_artifact_specs
-                    or self._global_artifact_cache_specs
-                    or project.artifact_cache_specs
+                artifact_specs = cli_artifact_remotes or resolve_specs_for_project(
+                    project, self._global_artifact_cache_config, "artifacts", "artifact_cache_specs",
                 )
-                artifact_specs = list(utils._deduplicate(artifact_specs))
 
             if connect_source_cache:
-                caches = override_node.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
-                override_source_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(node) for node in caches]
-                source_specs = (
-                    cli_source_remotes
-                    or override_source_specs
-                    or self._global_source_cache_specs
-                    or project.source_cache_specs
+                source_specs = cli_source_remotes or resolve_specs_for_project(
+                    project, self._global_source_cache_config, "source-caches", "source_cache_specs",
                 )
-                source_specs = list(utils._deduplicate(source_specs))
 
             # Store them for lookups later on
             project_artifact_cache_specs[project.name] = artifact_specs
@@ -703,3 +710,23 @@ class Context:
             remote_execution_specs = None
 
         return pull_artifact_files, remote_execution_specs
+
+
+# _CacheConfig
+#
+# A convenience object for parsing artifact/source cache configurations
+#
+class _CacheConfig:
+    def __init__(self, override_projects: bool, remote_specs: List[RemoteSpec]):
+        self.override_projects: bool = override_projects
+        self.remote_specs: List[RemoteSpec] = remote_specs
+
+    @classmethod
+    def new_from_node(cls, node: MappingNode) -> "_CacheConfig":
+        node.validate_keys(["override-project-caches", "servers"])
+        servers = node.get_sequence("servers", default=[], allowed_types=[MappingNode])
+
+        override_projects: bool = node.get_bool("override-project-caches", default=False)
+        remote_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(server) for server in servers]
+
+        return cls(override_projects, remote_specs)
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 6a170a4..31b74c6 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -787,7 +787,7 @@ def cli_remote_execution(tmpdir, remote_services):
     if remote_services.artifact_storage_service:
         artifacts.append({"url": remote_services.artifact_storage_service, "push": True, "type": "storage"})
     if artifacts:
-        fixture.configure({"artifacts": artifacts})
+        fixture.configure({"artifacts": {"servers": artifacts}})
 
     remote_execution = {}
     if remote_services.action_service:
@@ -806,7 +806,7 @@ def cli_remote_execution(tmpdir, remote_services):
         fixture.configure({"remote-execution": remote_execution})
 
     if remote_services.source_service:
-        fixture.configure({"source-caches": [{"url": remote_services.source_service,}]})
+        fixture.configure({"source-caches": {"servers": [{"url": remote_services.source_service,}]}})
 
     return fixture
 
diff --git a/tests/artifactcache/capabilities.py b/tests/artifactcache/capabilities.py
index a74ed6c..09a6b59 100644
--- a/tests/artifactcache/capabilities.py
+++ b/tests/artifactcache/capabilities.py
@@ -27,7 +27,7 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 3bf853a..5223485 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -40,17 +40,22 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
 
     user_config = {}
     if user_caches:
-        user_config["artifacts"] = [
-            {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]} for cache in user_caches
-        ]
+        user_config["artifacts"] = {
+            "servers": [
+                {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
+                for cache in user_caches
+            ]
+        }
 
     if override_caches:
         user_config["projects"] = {
             "test": {
-                "artifacts": [
-                    {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
-                    for cache in override_caches
-                ]
+                "artifacts": {
+                    "servers": [
+                        {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
+                        for cache in override_caches
+                    ]
+                }
             }
         }
 
@@ -106,7 +111,8 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
         parsed_cache_specs = artifactcache._project_specs[project.name]
 
         # Verify that it was correctly read.
-        expected_cache_specs = list(_deduplicate(override_caches or user_caches or project_caches))
+        expected_cache_specs = list(_deduplicate(override_caches or user_caches))
+        expected_cache_specs = list(_deduplicate(expected_cache_specs + project_caches))
         assert parsed_cache_specs == expected_cache_specs
 
 
@@ -205,7 +211,7 @@ def test_paths_for_artifact_config_are_expanded(tmpdir, monkeypatch, artifacts_c
     project_config = {"name": "test", "min-version": "2.0"}
     user_config = {}
     if in_user_config:
-        user_config["artifacts"] = artifacts_config
+        user_config["artifacts"] = {"servers": artifacts_config}
     else:
         project_config["artifacts"] = artifacts_config
 
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 502c350..28f8588 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -41,7 +41,7 @@ def test_pull(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
 
@@ -106,7 +106,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 48985df..858065d 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -66,7 +66,7 @@ def test_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": rootcache_dir,
         }
 
@@ -95,10 +95,12 @@ def test_push_split(cli, tmpdir, datafiles):
         rootcache_dir = os.path.join(str(tmpdir), "cache")
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [
-                {"url": index.repo, "push": True, "type": "index"},
-                {"url": storage.repo, "push": True, "type": "storage"},
-            ],
+            "artifacts": {
+                "servers": [
+                    {"url": index.repo, "push": True, "type": "index"},
+                    {"url": storage.repo, "push": True, "type": "storage"},
+                ],
+            },
             "cachedir": rootcache_dir,
         }
         config_path = str(tmpdir.join("buildstream.conf"))
@@ -123,7 +125,7 @@ def test_push_message(tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/frontend/artifact_checkout.py b/tests/frontend/artifact_checkout.py
index 329d2df..e8231b8 100644
--- a/tests/frontend/artifact_checkout.py
+++ b/tests/frontend/artifact_checkout.py
@@ -39,7 +39,7 @@ def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist, wi
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Build the element to push it to cache
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Build it
         result = cli.run(project=project, args=["build", "target-import.bst"])
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 90d448b..e120810 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -151,9 +151,7 @@ def test_artifact_delete_pulled_artifact_without_buildtree(cli, tmpdir, datafile
     # Set up remote and local shares
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
-        cli.configure(
-            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": remote.repo, "push": True}]}, "cachedir": local_cache})
 
         # Build the element
         result = cli.run(project=project, args=["build", element])
diff --git a/tests/frontend/artifact_pull.py b/tests/frontend/artifact_pull.py
index 926f96e..afd9f62 100644
--- a/tests/frontend/artifact_pull.py
+++ b/tests/frontend/artifact_pull.py
@@ -40,9 +40,7 @@ def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
 
         # Build the element to push it to cache, and explicitly configure local cache so we can check it
         local_cache = os.path.join(str(tmpdir), "cache")
-        cli.configure(
-            {"cachedir": local_cache, "artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"cachedir": local_cache, "artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Build it
         result = cli.run(project=project, args=["build", "target.bst"])
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index 7e8f8ee..652adfb 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -184,7 +184,7 @@ def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
         cli.configure(
-            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
+            {"artifacts": {"servers": [{"url": remote.repo, "push": True}]}, "cachedir": local_cache,}
         )
 
         # Build the element
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 3a1d650..628e886 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -124,7 +124,7 @@ def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
 
         # First build it in non-strict mode with an artifact server configured.
         # With this configuration BuildStream will attempt to pull the build-only
@@ -1089,7 +1089,7 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -1124,7 +1124,7 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         res = cli.run(project=project, args=["artifact", "checkout", "--pull", build_elt, "--directory", checkout_dir])
         res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
diff --git a/tests/frontend/default_target.py b/tests/frontend/default_target.py
index f6573c6..a10133d 100644
--- a/tests/frontend/default_target.py
+++ b/tests/frontend/default_target.py
@@ -177,7 +177,7 @@ def test_default_target_push_pull(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Push the artifacts
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["artifact", "push"])
         result.assert_success()
 
diff --git a/tests/frontend/large_directory.py b/tests/frontend/large_directory.py
index ea29fd1..f8ac932 100644
--- a/tests/frontend/large_directory.py
+++ b/tests/frontend/large_directory.py
@@ -64,7 +64,7 @@ def test_large_directory(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Configure bst to push to the artifact share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # Enforce 1 MB gRPC message limit
         with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 6733584..3e50b72 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -46,7 +46,7 @@ def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
@@ -90,7 +90,9 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
     ) as share2:
 
         # Build the target and push it to share2 only.
-        cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]}}
+        )
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
 
@@ -137,7 +139,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
 
         # Configure the default push location to be bad_share; we will assert that
         # nothing actually gets pushed there.
-        cli.configure({"artifacts": [{"url": bad_share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": bad_share.repo, "push": True},]}})
 
         # Now try `bst artifact push` to the good_share.
         result = cli.run(project=project, args=["artifact", "push", "target.bst", "--remote", good_share.repo])
@@ -173,7 +175,9 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share.repo, "push": True}]}, "projects": {"test": {"strict": False}}}
+        )
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "target.bst") == "cached"
@@ -223,7 +227,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
         generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
@@ -288,7 +292,7 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
     project = str(datafiles)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         _test_pull_missing_blob(cli, project, share, share)
 
@@ -303,10 +307,12 @@ def test_pull_missing_blob_split_share(cli, tmpdir, datafiles):
     with create_split_share(indexshare, storageshare) as (index, storage):
         cli.configure(
             {
-                "artifacts": [
-                    {"url": index.repo, "push": True, "type": "index"},
-                    {"url": storage.repo, "push": True, "type": "storage"},
-                ]
+                "artifacts": {
+                    "servers": [
+                        {"url": index.repo, "push": True, "type": "index"},
+                        {"url": storage.repo, "push": True, "type": "storage"},
+                    ]
+                }
             }
         )
 
@@ -343,7 +349,7 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the import-bin element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -374,7 +380,7 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -394,7 +400,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
     ) as sharecli:
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": shareuser.repo, "push": True}]}})
 
         # Push the artifacts to the shareuser remote.
         # Assert that shareuser has the artifacts cached, but sharecli doesn't,
@@ -449,7 +455,7 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", "compose-all.bst"])
         result.assert_success()
 
@@ -511,7 +517,7 @@ def test_pull_artifact(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["build", element])
         result.assert_success()
@@ -549,7 +555,7 @@ def test_dynamic_build_plan(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 8f71c8a..4059b7c 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -71,12 +71,14 @@ def test_push(cli, tmpdir, datafiles):
 
             # Configure bst to pull but not push from a cache and run `bst artifact push`.
             # This should also fail.
-            cli.configure({"artifacts": [{"url": share1.repo, "push": False},]})
+            cli.configure({"artifacts": {"servers": [{"url": share1.repo, "push": False}]}})
             result = cli.run(project=project, args=["artifact", "push", "target.bst"])
             result.assert_main_error(ErrorDomain.STREAM, None)
 
             # Configure bst to push to one of the caches and run `bst artifact push`. This works.
-            cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
+            cli.configure(
+                {"artifacts": {"servers": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]}}
+            )
             cli.run(project=project, args=["artifact", "push", "target.bst"])
 
             assert_not_shared(cli, share1, project, "target.bst")
@@ -85,7 +87,9 @@ def test_push(cli, tmpdir, datafiles):
         # Now try pushing to both
 
         with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
-            cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
+            cli.configure(
+                {"artifacts": {"servers": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]}}
+            )
             cli.run(project=project, args=["artifact", "push", "target.bst"])
 
             assert_shared(cli, share1, project, "target.bst")
@@ -125,7 +129,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": [{"url": share.repo, "push": True,}],
+                "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 
@@ -161,7 +165,7 @@ def test_push_artifact_glob(cli, tmpdir, datafiles):
         assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
 
         # Configure artifact share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Run bst artifact push with a wildcard, there is only one artifact
         # matching "test/target/*", even though it can be accessed both by its
@@ -184,7 +188,7 @@ def test_push_fails(cli, tmpdir, datafiles):
     # Set up the share
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Configure bst to be able to push to the share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # First ensure that the target is *NOT* cached
         assert cli.get_element_state(project, "target.bst") != "cached"
@@ -222,7 +226,7 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, "import-dev.bst") != "cached"
 
         # Configure bst to be able to push to the share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # Now try and push the target with its deps using --on-error continue
         # and assert that push failed, but what could be pushed was pushed
@@ -280,7 +284,7 @@ def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": [{"url": share.repo, "push": True,}],
+                "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 
@@ -329,7 +333,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": [{"url": share.repo, "push": True,}],
+                "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 
@@ -352,7 +356,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 
         # Set the scene: share1 has the artifact, share2 does not.
         #
-        cli.configure({"artifacts": [{"url": share1.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share1.repo, "push": True},]}})
 
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
@@ -376,7 +380,9 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 
         # Now we add share2 into the mix as a second push remote. This time,
         # `bst build` should push to share2 after pulling from share1.
-        cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]}}
+        )
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
         assert result.get_pulled_elements() == ["target.bst"]
@@ -396,7 +402,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
 
         # Configure bst to push to the cache
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # Create and build an element of 15 MB
         create_element_size("element1.bst", project, element_path, [], int(15e6))
@@ -446,9 +452,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:
 
         # Configure bst to push to the remote cache
-        cli.configure(
-            {"artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}],}})
 
         # Create and push a 3MB element
         create_element_size("small_element.bst", project, element_path, [], int(3e6))
@@ -499,9 +503,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
 
         # Configure bst to push to the cache
-        cli.configure(
-            {"artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}],}})
 
         # Create and build 2 elements, one 5 MB and one 15 MB.
         create_element_size("element1.bst", project, element_path, [], int(5e6))
@@ -563,9 +565,7 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure(
-            {"artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}],}})
         cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
 
         cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
@@ -579,7 +579,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -607,7 +607,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
             projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": shareuser.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
 
@@ -634,7 +634,9 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
     caplog.set_level(1)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share.repo, "push": True}]}, "projects": {"test": {"strict": False}}}
+        )
 
         # First get us a build
         result = cli.run(project=project, args=["build", "target.bst"])
@@ -693,7 +695,7 @@ def test_push_after_rebuild(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, "random.bst") != "cached"
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Now rebuild the element and push it
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -718,7 +720,7 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
     )
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Build the element and push the artifact
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -737,6 +739,6 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, "random.bst") == "cached"
 
         # Push the new build
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["artifact", "push", "random.bst"])
         assert result.get_pushed_elements() == ["random.bst"]
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index 03d728d..103af10 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -45,8 +45,8 @@ def test_source_artifact_caches(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -80,8 +80,8 @@ def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 3a8bc22..17ff8b2 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -1030,7 +1030,7 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
         result = cli.run(project=project, args=["-C", workspace, "build", element_name])
         result.assert_success()
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
         result.assert_success()
diff --git a/tests/remotecache/simple.py b/tests/remotecache/simple.py
index 4f8896a..00b32ec 100644
--- a/tests/remotecache/simple.py
+++ b/tests/remotecache/simple.py
@@ -57,7 +57,7 @@ def test_remote_autotools_build_no_cache(cli, datafiles):
     checkout = os.path.join(cli.directory, "checkout")
     element_name = "autotools/amhello.bst"
 
-    cli.configure({"artifacts": [{"url": "http://fake.url.service", "push": True}]})
+    cli.configure({"artifacts": {"servers": [{"url": "http://fake.url.service", "push": True}]}})
     result = cli.run(project=project, args=["build", element_name])
     result.assert_success()
 
diff --git a/tests/sourcecache/capabilities.py b/tests/sourcecache/capabilities.py
index 67be749..7ca6581 100644
--- a/tests/sourcecache/capabilities.py
+++ b/tests/sourcecache/capabilities.py
@@ -25,7 +25,11 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         # Configure artifact share
         cache_dir = os.path.join(str(tmpdir), "cache")
         user_config_file = str(tmpdir.join("buildstream.conf"))
-        user_config = {"scheduler": {"pushers": 1}, "source-caches": [{"url": share.repo,}], "cachedir": cache_dir}
+        user_config = {
+            "scheduler": {"pushers": 1},
+            "source-caches": {"servers": [{"url": share.repo,}]},
+            "cachedir": cache_dir,
+        }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
 
         with dummy_context(config=user_config_file) as context:
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 7b2c63a..7af6ffc 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -51,7 +51,7 @@ def context_with_source_cache(cli, cache, share, tmpdir):
     user_config_file = str(tmpdir.join("buildstream.conf"))
     user_config = {
         "scheduler": {"pushers": 1},
-        "source-caches": [{"url": share.repo,}],
+        "source-caches": {"servers": [{"url": share.repo,}]},
         "cachedir": cache,
     }
     _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index bc591ad..edbcfdf 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -62,10 +62,12 @@ def test_source_push_split(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [
-                {"url": index.repo, "push": True, "type": "index"},
-                {"url": storage.repo, "push": True, "type": "storage"},
-            ],
+            "source-caches": {
+                "servers": [
+                    {"url": index.repo, "push": True, "type": "index"},
+                    {"url": storage.repo, "push": True, "type": "storage"},
+                ]
+            },
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -115,7 +117,7 @@ def test_source_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -165,7 +167,7 @@ def test_push_pull(cli, datafiles, tmpdir):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -203,7 +205,7 @@ def test_push_fail(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -234,7 +236,7 @@ def test_source_push_build_fail(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         cli.configure(user_config)
@@ -275,7 +277,7 @@ def test_push_missing_source_after_build(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/workspace.py b/tests/sourcecache/workspace.py
index 7dccbe0..c9cb79b 100644
--- a/tests/sourcecache/workspace.py
+++ b/tests/sourcecache/workspace.py
@@ -75,7 +75,7 @@ def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
             {
                 "cachedir": cache_dir,
                 "scheduler": {"pushers": 1},
-                "source-caches": [{"url": share.repo, "push": True,}],
+                "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
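
As a closing illustration of the precedence logic added to _context.py above,
here is a minimal standalone sketch in plain Python (not the BuildStream API;
names and URLs are illustrative): CLI remotes win outright, a configuration
with "override-project-caches" set discards the project recommendations, and
otherwise the configured remotes are concatenated with the project's
recommended remotes and deduplicated in order.

from typing import List, NamedTuple


class CacheConfig(NamedTuple):
    # Stand-in for the _CacheConfig class added in this commit
    override_projects: bool
    remote_specs: List[str]  # plain URLs stand in for RemoteSpec objects


def deduplicate(specs: List[str]) -> List[str]:
    # Order-preserving deduplication, in the spirit of utils._deduplicate()
    return list(dict.fromkeys(specs))


def resolve_specs(
    cli_remotes: List[str],
    override_config: CacheConfig,
    global_config: CacheConfig,
    project_recommended: List[str],
) -> List[str]:
    # Remotes given on the command line short-circuit everything else
    if cli_remotes:
        return cli_remotes
    # An explicit "override-project-caches" drops project recommendations
    if override_config.override_projects:
        return override_config.remote_specs
    if global_config.override_projects:
        return global_config.remote_specs
    # Otherwise take the per-project override, falling back to the global
    # config, and tack the project recommended remotes on at the end
    config_specs = override_config.remote_specs or global_config.remote_specs
    return deduplicate(config_specs + project_recommended)


# Example: nothing overrides, so the project's recommended remote is
# appended after the user's globally configured remote.
specs = resolve_specs(
    cli_remotes=[],
    override_config=CacheConfig(False, []),
    global_config=CacheConfig(False, ["https://user.example.com"]),
    project_recommended=["https://project.example.com"],
)
assert specs == ["https://user.example.com", "https://project.example.com"]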