Posted to commits@buildstream.apache.org by tv...@apache.org on 2021/01/28 07:43:22 UTC

[buildstream] branch tristan/change-remote-config updated (e8578d5 -> c0194c7)

This is an automated email from the ASF dual-hosted git repository.

tvb pushed a change to branch tristan/change-remote-config
in repository https://gitbox.apache.org/repos/asf/buildstream.git.


    from e8578d5  _context.py: Now with full type annotations.
     new 76489ef  doc: Redocumenting artifact/source cache servers.
     new c0194c7  _context.py: Changed artifact and source cache configuration again.

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 doc/source/format_project.rst                 | 161 ++-------
 doc/source/using_config.rst                   | 464 +++++++++++++++++++-------
 doc/source/using_configuring_cache_server.rst |  57 +---
 src/buildstream/_context.py                   |  97 ++++--
 src/buildstream/plugins/elements/stack.py     |   2 +-
 src/buildstream/testing/runcli.py             |   4 +-
 tests/artifactcache/capabilities.py           |   2 +-
 tests/artifactcache/config.py                 |  24 +-
 tests/artifactcache/pull.py                   |   4 +-
 tests/artifactcache/push.py                   |  14 +-
 tests/frontend/artifact_checkout.py           |   2 +-
 tests/frontend/artifact_delete.py             |   4 +-
 tests/frontend/artifact_pull.py               |   4 +-
 tests/frontend/artifact_show.py               |   2 +-
 tests/frontend/buildcheckout.py               |   6 +-
 tests/frontend/default_target.py              |   2 +-
 tests/frontend/large_directory.py             |   2 +-
 tests/frontend/pull.py                        |  38 ++-
 tests/frontend/push.py                        |  56 ++--
 tests/frontend/remote-caches.py               |   8 +-
 tests/frontend/workspace.py                   |   2 +-
 tests/remotecache/simple.py                   |   2 +-
 tests/sourcecache/capabilities.py             |   6 +-
 tests/sourcecache/fetch.py                    |   2 +-
 tests/sourcecache/push.py                     |  20 +-
 tests/sourcecache/workspace.py                |   2 +-
 26 files changed, 575 insertions(+), 412 deletions(-)


[buildstream] 02/02: _context.py: Changed artifact and source cache configuration again.

Posted by tv...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

tvb pushed a commit to branch tristan/change-remote-config
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit c0194c7f9066df21df2c2eb5ffc0c94b00f9e934
Author: Tristan van Berkom <tr...@codethink.co.uk>
AuthorDate: Thu Jan 28 16:21:39 2021 +0900

    _context.py: Changed artifact and source cache configuration again.
    
    Added a new "servers" sublist and made the "artifacts" and
    "source-caches" configurations into dictionaries.
    
    This allows for adding the "override-project-caches" boolean attribute,
    which decides whether this configuration overrides the recommendations
    made in the project configuration.
    
    Also updated all tests to provide the new "servers" sublist.
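
    For illustration, a user configuration in the new shape might look like
    this (a sketch inferred from the changes below; the URL is a placeholder):

        artifacts:
          override-project-caches: false
          servers:
          - url: https://cache.example.com:11001
            push: true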
---
 src/buildstream/_context.py         | 97 ++++++++++++++++++++++++-------------
 src/buildstream/testing/runcli.py   |  4 +-
 tests/artifactcache/capabilities.py |  2 +-
 tests/artifactcache/config.py       | 24 +++++----
 tests/artifactcache/pull.py         |  4 +-
 tests/artifactcache/push.py         | 14 +++---
 tests/frontend/artifact_checkout.py |  2 +-
 tests/frontend/artifact_delete.py   |  4 +-
 tests/frontend/artifact_pull.py     |  4 +-
 tests/frontend/artifact_show.py     |  2 +-
 tests/frontend/buildcheckout.py     |  6 +--
 tests/frontend/default_target.py    |  2 +-
 tests/frontend/large_directory.py   |  2 +-
 tests/frontend/pull.py              | 38 +++++++++------
 tests/frontend/push.py              | 56 ++++++++++-----------
 tests/frontend/remote-caches.py     |  8 +--
 tests/frontend/workspace.py         |  2 +-
 tests/remotecache/simple.py         |  2 +-
 tests/sourcecache/capabilities.py   |  6 ++-
 tests/sourcecache/fetch.py          |  2 +-
 tests/sourcecache/push.py           | 20 ++++----
 tests/sourcecache/workspace.py      |  2 +-
 22 files changed, 174 insertions(+), 129 deletions(-)

diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 2caf8f9..2e85b87 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -42,6 +42,7 @@ from .node import Node, MappingNode
 if TYPE_CHECKING:
     # pylint: disable=cyclic-import
     from ._project import Project
+
     # pylint: enable=cyclic-import
 
 
@@ -176,9 +177,9 @@ class Context:
         # Whether elements must be rebuilt when their dependencies have changed
         self._strict_build_plan: Optional[bool] = None
 
-        # Lists of globally configured cache specs
-        self._global_artifact_cache_specs: List[RemoteSpec] = []
-        self._global_source_cache_specs: List[RemoteSpec] = []
+        # Globally configured cache configurations
+        self._global_artifact_cache_config: _CacheConfig = _CacheConfig(False, [])
+        self._global_source_cache_config: _CacheConfig = _CacheConfig(False, [])
 
         # Set of all actively configured remote specs
         self._active_artifact_cache_specs: Set[RemoteSpec] = set()
@@ -342,13 +343,13 @@ class Context:
                 LoadErrorReason.INVALID_DATA,
             ) from e
 
-        # Load artifact remote specs
-        caches = defaults.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
-        self._global_artifact_cache_specs = [RemoteSpec.new_from_node(node) for node in caches]
+        # Load global artifact cache configuration
+        cache_config = defaults.get_mapping("artifacts", default={})
+        self._global_artifact_cache_config = _CacheConfig.new_from_node(cache_config)
 
-        # Load source cache remote specs
-        caches = defaults.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
-        self._global_source_cache_specs = [RemoteSpec.new_from_node(node) for node in caches]
+        # Load global source cache configuration
+        cache_config = defaults.get_mapping("source-caches", default={})
+        self._global_source_cache_config = _CacheConfig.new_from_node(cache_config)
 
         # Load the global remote execution config including pull-artifact-files setting
         remote_execution = defaults.get_mapping("remote-execution", default=None)
@@ -517,12 +518,9 @@ class Context:
         project = self.get_toplevel_project()
         if project:
             override_node = self.get_overrides(project.name)
-            if override_node:
-                remote_execution = override_node.get_mapping("remote-execution", default=None)
-                if remote_execution:
-                    self.pull_artifact_files, self.remote_execution_specs = self._load_remote_execution(
-                        remote_execution
-                    )
+            remote_execution = override_node.get_mapping("remote-execution", default=None)
+            if remote_execution:
+                self.pull_artifact_files, self.remote_execution_specs = self._load_remote_execution(remote_execution)
 
         # Collect a table of which specs apply to each project, these
         # are calculated here and handed over to the asset caches.
@@ -534,37 +532,46 @@ class Context:
         cli_source_remotes = [source_remote] if source_remote else []
 
         #
+        # Helper function to resolve which remote specs apply for a given project
+        #
+        def resolve_specs_for_project(
+            project: "Project", global_config: _CacheConfig, override_key: str, project_attribute: str,
+        ) -> List[RemoteSpec]:
+
+            # Obtain the overrides
+            override_node = self.get_overrides(project.name)
+            override_config_node = override_node.get_mapping(override_key, default={})
+            override_config = _CacheConfig.new_from_node(override_config_node)
+            if override_config.override_projects:
+                return override_config.remote_specs
+            elif global_config.override_projects:
+                return global_config.remote_specs
+
+            # If there were no explicit overrides, then take either the project specific
+            # config or fallback to the global config, and tack on the project recommended
+            # remotes at the end.
+            #
+            config_specs = override_config.remote_specs or global_config.remote_specs
+            project_specs = getattr(project, project_attribute)
+            all_specs = config_specs + project_specs
+            return list(utils._deduplicate(all_specs))
+
+        #
         # Maintain our list of remote specs for artifact and source caches
         #
         for project in self._projects:
-
             artifact_specs: List[RemoteSpec] = []
             source_specs: List[RemoteSpec] = []
 
-            override_node = self.get_overrides(project.name)
-
-            # Resolve which remote specs to use, CLI -> Override -> Global -> Project recommendation
             if connect_artifact_cache:
-                caches = override_node.get_sequence("artifacts", default=[], allowed_types=[MappingNode])
-                override_artifact_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(node) for node in caches]
-                artifact_specs = (
-                    cli_artifact_remotes
-                    or override_artifact_specs
-                    or self._global_artifact_cache_specs
-                    or project.artifact_cache_specs
+                artifact_specs = cli_artifact_remotes or resolve_specs_for_project(
+                    project, self._global_artifact_cache_config, "artifacts", "artifact_cache_specs",
                 )
-                artifact_specs = list(utils._deduplicate(artifact_specs))
 
             if connect_source_cache:
-                caches = override_node.get_sequence("source-caches", default=[], allowed_types=[MappingNode])
-                override_source_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(node) for node in caches]
-                source_specs = (
-                    cli_source_remotes
-                    or override_source_specs
-                    or self._global_source_cache_specs
-                    or project.source_cache_specs
+                source_specs = cli_source_remotes or resolve_specs_for_project(
+                    project, self._global_source_cache_config, "source-caches", "source_cache_specs",
                 )
-                source_specs = list(utils._deduplicate(source_specs))
 
             # Store them for lookups later on
             project_artifact_cache_specs[project.name] = artifact_specs
@@ -703,3 +710,23 @@ class Context:
             remote_execution_specs = None
 
         return pull_artifact_files, remote_execution_specs
+
+
+# _CacheConfig
+#
+# A convenience object for parsing artifact/source cache configurations
+#
+class _CacheConfig:
+    def __init__(self, override_projects: bool, remote_specs: List[RemoteSpec]):
+        self.override_projects: bool = override_projects
+        self.remote_specs: List[RemoteSpec] = remote_specs
+
+    @classmethod
+    def new_from_node(cls, node: MappingNode) -> "_CacheConfig":
+        node.validate_keys(["override-project-caches", "servers"])
+        servers = node.get_sequence("servers", default=[], allowed_types=[MappingNode])
+
+        override_projects: bool = node.get_bool("override-project-caches", default=False)
+        remote_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(server) for server in servers]
+
+        return cls(override_projects, remote_specs)
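
As a sketch of the resulting behaviour (hypothetical project name and URL; the
key names follow the hunks above), a user can discard a project's recommended
remotes entirely with a per-project override:

    projects:
      myproject:
        artifacts:
          override-project-caches: true
          servers:
          - url: https://private-cache.example.com:11001
            push: true

Without "override-project-caches", the configured servers are instead placed
ahead of the project's recommended remotes and the combined list is
deduplicated.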
diff --git a/src/buildstream/testing/runcli.py b/src/buildstream/testing/runcli.py
index 6a170a4..31b74c6 100644
--- a/src/buildstream/testing/runcli.py
+++ b/src/buildstream/testing/runcli.py
@@ -787,7 +787,7 @@ def cli_remote_execution(tmpdir, remote_services):
     if remote_services.artifact_storage_service:
         artifacts.append({"url": remote_services.artifact_storage_service, "push": True, "type": "storage"})
     if artifacts:
-        fixture.configure({"artifacts": artifacts})
+        fixture.configure({"artifacts": {"servers": artifacts}})
 
     remote_execution = {}
     if remote_services.action_service:
@@ -806,7 +806,7 @@ def cli_remote_execution(tmpdir, remote_services):
         fixture.configure({"remote-execution": remote_execution})
 
     if remote_services.source_service:
-        fixture.configure({"source-caches": [{"url": remote_services.source_service,}]})
+        fixture.configure({"source-caches": {"servers": [{"url": remote_services.source_service,}]}})
 
     return fixture
 
diff --git a/tests/artifactcache/capabilities.py b/tests/artifactcache/capabilities.py
index a74ed6c..09a6b59 100644
--- a/tests/artifactcache/capabilities.py
+++ b/tests/artifactcache/capabilities.py
@@ -27,7 +27,7 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 3bf853a..5223485 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -40,17 +40,22 @@ def configure_remote_caches(override_caches, project_caches=None, user_caches=No
 
     user_config = {}
     if user_caches:
-        user_config["artifacts"] = [
-            {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]} for cache in user_caches
-        ]
+        user_config["artifacts"] = {
+            "servers": [
+                {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
+                for cache in user_caches
+            ]
+        }
 
     if override_caches:
         user_config["projects"] = {
             "test": {
-                "artifacts": [
-                    {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
-                    for cache in override_caches
-                ]
+                "artifacts": {
+                    "servers": [
+                        {"url": cache.url, "push": cache.push, "type": type_strings[cache.remote_type]}
+                        for cache in override_caches
+                    ]
+                }
             }
         }
 
@@ -106,7 +111,8 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
         parsed_cache_specs = artifactcache._project_specs[project.name]
 
         # Verify that it was correctly read.
-        expected_cache_specs = list(_deduplicate(override_caches or user_caches or project_caches))
+        expected_cache_specs = list(_deduplicate(override_caches or user_caches))
+        expected_cache_specs = list(_deduplicate(expected_cache_specs + project_caches))
         assert parsed_cache_specs == expected_cache_specs
 
 
@@ -205,7 +211,7 @@ def test_paths_for_artifact_config_are_expanded(tmpdir, monkeypatch, artifacts_c
     project_config = {"name": "test", "min-version": "2.0"}
     user_config = {}
     if in_user_config:
-        user_config["artifacts"] = artifacts_config
+        user_config["artifacts"] = {"servers": artifacts_config}
     else:
         project_config["artifacts"] = artifacts_config
 
diff --git a/tests/artifactcache/pull.py b/tests/artifactcache/pull.py
index 502c350..28f8588 100644
--- a/tests/artifactcache/pull.py
+++ b/tests/artifactcache/pull.py
@@ -41,7 +41,7 @@ def test_pull(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
 
@@ -106,7 +106,7 @@ def test_pull_tree(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/artifactcache/push.py b/tests/artifactcache/push.py
index 48985df..858065d 100644
--- a/tests/artifactcache/push.py
+++ b/tests/artifactcache/push.py
@@ -66,7 +66,7 @@ def test_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": rootcache_dir,
         }
 
@@ -95,10 +95,12 @@ def test_push_split(cli, tmpdir, datafiles):
         rootcache_dir = os.path.join(str(tmpdir), "cache")
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [
-                {"url": index.repo, "push": True, "type": "index"},
-                {"url": storage.repo, "push": True, "type": "storage"},
-            ],
+            "artifacts": {
+                "servers": [
+                    {"url": index.repo, "push": True, "type": "index"},
+                    {"url": storage.repo, "push": True, "type": "storage"},
+                ],
+            },
             "cachedir": rootcache_dir,
         }
         config_path = str(tmpdir.join("buildstream.conf"))
@@ -123,7 +125,7 @@ def test_push_message(tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": rootcache_dir,
         }
 
diff --git a/tests/frontend/artifact_checkout.py b/tests/frontend/artifact_checkout.py
index 329d2df..e8231b8 100644
--- a/tests/frontend/artifact_checkout.py
+++ b/tests/frontend/artifact_checkout.py
@@ -39,7 +39,7 @@ def test_checkout(cli, tmpdir, datafiles, deps, expect_exist, expect_noexist, wi
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Build the element to push it to cache
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Build it
         result = cli.run(project=project, args=["build", "target-import.bst"])
diff --git a/tests/frontend/artifact_delete.py b/tests/frontend/artifact_delete.py
index 90d448b..e120810 100644
--- a/tests/frontend/artifact_delete.py
+++ b/tests/frontend/artifact_delete.py
@@ -151,9 +151,7 @@ def test_artifact_delete_pulled_artifact_without_buildtree(cli, tmpdir, datafile
     # Set up remote and local shares
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
-        cli.configure(
-            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": remote.repo, "push": True}]}, "cachedir": local_cache})
 
         # Build the element
         result = cli.run(project=project, args=["build", element])
diff --git a/tests/frontend/artifact_pull.py b/tests/frontend/artifact_pull.py
index 926f96e..afd9f62 100644
--- a/tests/frontend/artifact_pull.py
+++ b/tests/frontend/artifact_pull.py
@@ -40,9 +40,7 @@ def test_pull(cli, tmpdir, datafiles, deps, expect_cached, with_project):
 
         # Build the element to push it to cache, and explicitly configure local cache so we can check it
         local_cache = os.path.join(str(tmpdir), "cache")
-        cli.configure(
-            {"cachedir": local_cache, "artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"cachedir": local_cache, "artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Build it
         result = cli.run(project=project, args=["build", "target.bst"])
diff --git a/tests/frontend/artifact_show.py b/tests/frontend/artifact_show.py
index 7e8f8ee..652adfb 100644
--- a/tests/frontend/artifact_show.py
+++ b/tests/frontend/artifact_show.py
@@ -184,7 +184,7 @@ def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
     local_cache = os.path.join(str(tmpdir), "artifacts")
     with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
         cli.configure(
-            {"artifacts": [{"url": remote.repo, "push": True}], "cachedir": local_cache,}
+            {"artifacts": {"servers": [{"url": remote.repo, "push": True}]}, "cachedir": local_cache,}
         )
 
         # Build the element
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 3a1d650..628e886 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -124,7 +124,7 @@ def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
 
         # First build it in non-strict mode with an artifact server configured.
         # With this configuration BuildStream will attempt to pull the build-only
@@ -1089,7 +1089,7 @@ def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -1124,7 +1124,7 @@ def test_partial_checkout_fail(tmpdir, datafiles, cli):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         res = cli.run(project=project, args=["artifact", "checkout", "--pull", build_elt, "--directory", checkout_dir])
         res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
diff --git a/tests/frontend/default_target.py b/tests/frontend/default_target.py
index f6573c6..a10133d 100644
--- a/tests/frontend/default_target.py
+++ b/tests/frontend/default_target.py
@@ -177,7 +177,7 @@ def test_default_target_push_pull(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Push the artifacts
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["artifact", "push"])
         result.assert_success()
 
diff --git a/tests/frontend/large_directory.py b/tests/frontend/large_directory.py
index ea29fd1..f8ac932 100644
--- a/tests/frontend/large_directory.py
+++ b/tests/frontend/large_directory.py
@@ -64,7 +64,7 @@ def test_large_directory(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Configure bst to push to the artifact share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # Enforce 1 MB gRPC message limit
         with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 6733584..3e50b72 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -46,7 +46,7 @@ def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
@@ -90,7 +90,9 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
     ) as share2:
 
         # Build the target and push it to share2 only.
-        cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]}}
+        )
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
 
@@ -137,7 +139,7 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
 
         # Configure the default push location to be bad_share; we will assert that
         # nothing actually gets pushed there.
-        cli.configure({"artifacts": [{"url": bad_share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": bad_share.repo, "push": True},]}})
 
         # Now try `bst artifact push` to the good_share.
         result = cli.run(project=project, args=["artifact", "push", "target.bst", "--remote", good_share.repo])
@@ -173,7 +175,9 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share.repo, "push": True}]}, "projects": {"test": {"strict": False}}}
+        )
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "target.bst") == "cached"
@@ -223,7 +227,7 @@ def test_push_pull_cross_junction(cli, tmpdir, datafiles):
         generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
         result.assert_success()
         assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
@@ -288,7 +292,7 @@ def test_pull_missing_blob(cli, tmpdir, datafiles):
     project = str(datafiles)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         _test_pull_missing_blob(cli, project, share, share)
 
@@ -303,10 +307,12 @@ def test_pull_missing_blob_split_share(cli, tmpdir, datafiles):
     with create_split_share(indexshare, storageshare) as (index, storage):
         cli.configure(
             {
-                "artifacts": [
-                    {"url": index.repo, "push": True, "type": "index"},
-                    {"url": storage.repo, "push": True, "type": "storage"},
-                ]
+                "artifacts": {
+                    "servers": [
+                        {"url": index.repo, "push": True, "type": "index"},
+                        {"url": storage.repo, "push": True, "type": "storage"},
+                    ]
+                }
             }
         )
 
@@ -343,7 +349,7 @@ def test_pull_missing_local_blob(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the import-bin element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["source", "track", input_name])
         result.assert_success()
@@ -374,7 +380,7 @@ def test_pull_missing_notifies_user(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -394,7 +400,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
     ) as sharecli:
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": shareuser.repo, "push": True}]}})
 
         # Push the artifacts to the shareuser remote.
         # Assert that shareuser has the artifacts cached, but sharecli doesn't,
@@ -449,7 +455,7 @@ def test_pull_access_rights(cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", "compose-all.bst"])
         result.assert_success()
 
@@ -511,7 +517,7 @@ def test_pull_artifact(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["build", element])
         result.assert_success()
@@ -549,7 +555,7 @@ def test_dynamic_build_plan(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
         # First build the target element and push to the remote.
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", target])
         result.assert_success()
 
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 8f71c8a..4059b7c 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -71,12 +71,14 @@ def test_push(cli, tmpdir, datafiles):
 
             # Configure bst to pull but not push from a cache and run `bst artifact push`.
             # This should also fail.
-            cli.configure({"artifacts": [{"url": share1.repo, "push": False},]})
+            cli.configure({"artifacts": {"servers": [{"url": share1.repo, "push": False}]}})
             result = cli.run(project=project, args=["artifact", "push", "target.bst"])
             result.assert_main_error(ErrorDomain.STREAM, None)
 
             # Configure bst to push to one of the caches and run `bst artifact push`. This works.
-            cli.configure({"artifacts": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]})
+            cli.configure(
+                {"artifacts": {"servers": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]}}
+            )
             cli.run(project=project, args=["artifact", "push", "target.bst"])
 
             assert_not_shared(cli, share1, project, "target.bst")
@@ -85,7 +87,9 @@ def test_push(cli, tmpdir, datafiles):
         # Now try pushing to both
 
         with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
-            cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
+            cli.configure(
+                {"artifacts": {"servers": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]}}
+            )
             cli.run(project=project, args=["artifact", "push", "target.bst"])
 
             assert_shared(cli, share1, project, "target.bst")
@@ -125,7 +129,7 @@ def test_push_artifact(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": [{"url": share.repo, "push": True,}],
+                "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 
@@ -161,7 +165,7 @@ def test_push_artifact_glob(cli, tmpdir, datafiles):
         assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))
 
         # Configure artifact share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Run bst artifact push with a wildcard, there is only one artifact
         # matching "test/target/*", even though it can be accessed both by its
@@ -184,7 +188,7 @@ def test_push_fails(cli, tmpdir, datafiles):
     # Set up the share
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
         # Configure bst to be able to push to the share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # First ensure that the target is *NOT* cached
         assert cli.get_element_state(project, "target.bst") != "cached"
@@ -222,7 +226,7 @@ def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, "import-dev.bst") != "cached"
 
         # Configure bst to be able to push to the share
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # Now try and push the target with its deps using --on-error continue
         # and assert that push failed, but what could be pushed was pushed
@@ -280,7 +284,7 @@ def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": [{"url": share.repo, "push": True,}],
+                "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 
@@ -329,7 +333,7 @@ def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
                 #        only, but it should probably be fixed.
                 #
                 "scheduler": {"pushers": 1},
-                "artifacts": [{"url": share.repo, "push": True,}],
+                "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 
@@ -352,7 +356,7 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 
         # Set the scene: share1 has the artifact, share2 does not.
         #
-        cli.configure({"artifacts": [{"url": share1.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share1.repo, "push": True},]}})
 
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
@@ -376,7 +380,9 @@ def test_push_after_pull(cli, tmpdir, datafiles):
 
         # Now we add share2 into the mix as a second push remote. This time,
         # `bst build` should push to share2 after pulling from share1.
-        cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]}}
+        )
         result = cli.run(project=project, args=["build", "target.bst"])
         result.assert_success()
         assert result.get_pulled_elements() == ["target.bst"]
@@ -396,7 +402,7 @@ def test_artifact_expires(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
 
         # Configure bst to push to the cache
-        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})
 
         # Create and build an element of 15 MB
         create_element_size("element1.bst", project, element_path, [], int(15e6))
@@ -446,9 +452,7 @@ def test_artifact_too_large(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:
 
         # Configure bst to push to the remote cache
-        cli.configure(
-            {"artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}],}})
 
         # Create and push a 3MB element
         create_element_size("small_element.bst", project, element_path, [], int(3e6))
@@ -499,9 +503,7 @@ def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:
 
         # Configure bst to push to the cache
-        cli.configure(
-            {"artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}],}})
 
         # Create and build 2 elements, one 5 MB and one 15 MB.
         create_element_size("element1.bst", project, element_path, [], int(5e6))
@@ -563,9 +565,7 @@ def test_push_cross_junction(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure(
-            {"artifacts": [{"url": share.repo, "push": True}],}
-        )
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}],}})
         cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
 
         cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
@@ -579,7 +579,7 @@ def test_push_already_cached(caplog, cli, tmpdir, datafiles):
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["build", "target.bst"])
 
         result.assert_success()
@@ -607,7 +607,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
             projconf.write("artifacts:\n- url: {}\n  push: True".format(shareproject.repo))
 
         # Configure shareuser remote in user conf
-        cli.configure({"artifacts": [{"url": shareuser.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": shareuser.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
 
@@ -634,7 +634,9 @@ def test_push_no_strict(caplog, cli, tmpdir, datafiles, buildtrees):
     caplog.set_level(1)
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}], "projects": {"test": {"strict": False}}})
+        cli.configure(
+            {"artifacts": {"servers": [{"url": share.repo, "push": True}]}, "projects": {"test": {"strict": False}}}
+        )
 
         # First get us a build
         result = cli.run(project=project, args=["build", "target.bst"])
@@ -693,7 +695,7 @@ def test_push_after_rebuild(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, "random.bst") != "cached"
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Now rebuild the element and push it
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -718,7 +720,7 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
     )
 
     with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         # Build the element and push the artifact
         result = cli.run(project=project, args=["build", "random.bst"])
@@ -737,6 +739,6 @@ def test_push_update_after_rebuild(cli, tmpdir, datafiles):
         assert cli.get_element_state(project, "random.bst") == "cached"
 
         # Push the new build
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
         result = cli.run(project=project, args=["artifact", "push", "random.bst"])
         assert result.get_pushed_elements() == ["random.bst"]
diff --git a/tests/frontend/remote-caches.py b/tests/frontend/remote-caches.py
index 03d728d..103af10 100644
--- a/tests/frontend/remote-caches.py
+++ b/tests/frontend/remote-caches.py
@@ -45,8 +45,8 @@ def test_source_artifact_caches(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -80,8 +80,8 @@ def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
-            "artifacts": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
+            "artifacts": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cachedir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/frontend/workspace.py b/tests/frontend/workspace.py
index 3a8bc22..17ff8b2 100644
--- a/tests/frontend/workspace.py
+++ b/tests/frontend/workspace.py
@@ -1030,7 +1030,7 @@ def test_external_push_pull(cli, datafiles, tmpdir_factory, guess_element):
         result = cli.run(project=project, args=["-C", workspace, "build", element_name])
         result.assert_success()
 
-        cli.configure({"artifacts": [{"url": share.repo, "push": True}]})
+        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
 
         result = cli.run(project=project, args=["-C", workspace, "artifact", "push", *arg_elm])
         result.assert_success()
diff --git a/tests/remotecache/simple.py b/tests/remotecache/simple.py
index 4f8896a..00b32ec 100644
--- a/tests/remotecache/simple.py
+++ b/tests/remotecache/simple.py
@@ -57,7 +57,7 @@ def test_remote_autotools_build_no_cache(cli, datafiles):
     checkout = os.path.join(cli.directory, "checkout")
     element_name = "autotools/amhello.bst"
 
-    cli.configure({"artifacts": [{"url": "http://fake.url.service", "push": True}]})
+    cli.configure({"artifacts": {"servers": [{"url": "http://fake.url.service", "push": True}]}})
     result = cli.run(project=project, args=["build", element_name])
     result.assert_success()
 
diff --git a/tests/sourcecache/capabilities.py b/tests/sourcecache/capabilities.py
index 67be749..7ca6581 100644
--- a/tests/sourcecache/capabilities.py
+++ b/tests/sourcecache/capabilities.py
@@ -25,7 +25,11 @@ def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafi
         # Configure artifact share
         cache_dir = os.path.join(str(tmpdir), "cache")
         user_config_file = str(tmpdir.join("buildstream.conf"))
-        user_config = {"scheduler": {"pushers": 1}, "source-caches": [{"url": share.repo,}], "cachedir": cache_dir}
+        user_config = {
+            "scheduler": {"pushers": 1},
+            "source-caches": {"servers": [{"url": share.repo,}]},
+            "cachedir": cache_dir,
+        }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
 
         with dummy_context(config=user_config_file) as context:
diff --git a/tests/sourcecache/fetch.py b/tests/sourcecache/fetch.py
index 7b2c63a..7af6ffc 100644
--- a/tests/sourcecache/fetch.py
+++ b/tests/sourcecache/fetch.py
@@ -51,7 +51,7 @@ def context_with_source_cache(cli, cache, share, tmpdir):
     user_config_file = str(tmpdir.join("buildstream.conf"))
     user_config = {
         "scheduler": {"pushers": 1},
-        "source-caches": [{"url": share.repo,}],
+        "source-caches": {"servers": [{"url": share.repo,}]},
         "cachedir": cache,
     }
     _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/push.py b/tests/sourcecache/push.py
index bc591ad..edbcfdf 100644
--- a/tests/sourcecache/push.py
+++ b/tests/sourcecache/push.py
@@ -62,10 +62,12 @@ def test_source_push_split(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [
-                {"url": index.repo, "push": True, "type": "index"},
-                {"url": storage.repo, "push": True, "type": "storage"},
-            ],
+            "source-caches": {
+                "servers": [
+                    {"url": index.repo, "push": True, "type": "index"},
+                    {"url": storage.repo, "push": True, "type": "storage"},
+                ]
+            },
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -115,7 +117,7 @@ def test_source_push(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -165,7 +167,7 @@ def test_push_pull(cli, datafiles, tmpdir):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -203,7 +205,7 @@ def test_push_fail(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
@@ -234,7 +236,7 @@ def test_source_push_build_fail(cli, tmpdir, datafiles):
     with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         cli.configure(user_config)
@@ -275,7 +277,7 @@ def test_push_missing_source_after_build(cli, tmpdir, datafiles):
         user_config_file = str(tmpdir.join("buildstream.conf"))
         user_config = {
             "scheduler": {"pushers": 1},
-            "source-caches": [{"url": share.repo, "push": True,}],
+            "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             "cachedir": cache_dir,
         }
         _yaml.roundtrip_dump(user_config, file=user_config_file)
diff --git a/tests/sourcecache/workspace.py b/tests/sourcecache/workspace.py
index 7dccbe0..c9cb79b 100644
--- a/tests/sourcecache/workspace.py
+++ b/tests/sourcecache/workspace.py
@@ -75,7 +75,7 @@ def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
             {
                 "cachedir": cache_dir,
                 "scheduler": {"pushers": 1},
-                "source-caches": [{"url": share.repo, "push": True,}],
+                "source-caches": {"servers": [{"url": share.repo, "push": True,}]},
             }
         )
 


[buildstream] 01/02: doc: Redocumenting artifact/source cache servers.

Posted by tv...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

tvb pushed a commit to branch tristan/change-remote-config
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 76489ef46d0aa4faf7c81b1f9c00054f8275cffe
Author: Tristan van Berkom <tr...@codethink.co.uk>
AuthorDate: Thu Jan 28 13:59:02 2021 +0900

    doc: Redocumenting artifact/source cache servers.
---
 doc/source/format_project.rst                 | 161 ++-------
 doc/source/using_config.rst                   | 464 +++++++++++++++++++-------
 doc/source/using_configuring_cache_server.rst |  57 +---
 src/buildstream/plugins/elements/stack.py     |   2 +-
 4 files changed, 401 insertions(+), 283 deletions(-)

diff --git a/doc/source/format_project.rst b/doc/source/format_project.rst
index 0216e52..0a7d6ef 100644
--- a/doc/source/format_project.rst
+++ b/doc/source/format_project.rst
@@ -192,165 +192,70 @@ for more detail.
      build-gid: 1001
 
 
-.. _project_essentials_artifacts:
+.. _project_artifact_cache:
 
 Artifact server
 ~~~~~~~~~~~~~~~
-If you have setup an :ref:`artifact server <cache_servers>` for your
-project then it is convenient to configure the following in your ``project.conf``
-so that users need not have any additional configuration to communicate
-with an artifact share.
+When maintaining a BuildStream project, it can be convenient for downstream
+users of your project to have access to a :ref:`cache server <cache_servers>` you maintain.
+
+The project can provide *recommended* artifact cache servers through project configuration
+using the same semantics as one normally uses in :ref:`user configuration <config_cache_servers>`:
 
 .. code:: yaml
 
   #
-  # Artifacts
+  # A remote cache from which to download prebuilt artifacts
   #
   artifacts:
-    # A remote cache from which to download prebuilt artifacts
-    - url: https://foo.com:11001
-      server-cert: server.crt
-    # A remote cache from which to upload/download built/prebuilt artifacts
-    - url: https://foo.com:11002
-      push: true
+  - url: https://foo.com:11001
+    auth:
       server-cert: server.crt
-      client-cert: client.crt
-      client-key: client.key
-
-.. note::
-
-    You can also specify a list of different caches here; earlier entries in the
-    list will have higher priority than later ones.
-
-The use of ports are required to distinguish between pull only access and
-push/pull access. For information regarding the server/client certificates
-and keys, please see: :ref:`Key pair for the server <server_authentication>`.
-
-.. note::
-
-   Buildstream artifact servers have changed since 1.2 to use protocol buffers
-   to store artifact information rather than a directory structure, as well as a
-   new server API. As a result newer buildstream clients won't work with older
-   servers.
 
+.. attention::
 
-.. _project_essentials_split_artifacts:
+   Unlike user configuration, the filenames provided in the :ref:`auth <config_remote_auth>`
+   configuration block are relative to the :ref:`project directory <format_structure>`.
 
-Split cache servers
-~~~~~~~~~~~~~~~~~~~
+   It is recommended to include public keys such as the ``server-cert`` along with your
+   project so that downstream users can have automatic read access to your project.
 
-Should you need to configure an artifact cache to work with a CAS
-server that does not support BuildStream's artifact format, you can
-"split" that cache and run an artifacts-only server separately. The
-format for that is as such:
-
-.. code:: yaml
-
-  #
-  # Artifacts
-  #
-  artifacts:
-    # A remote cache from which to download prebuilt artifacts
-    - url: https://storage.foo.com:11001
-      server-cert: server.crt
-      # "storage" remotes store the artifact contents only - this can
-      # be a normal CAS implementation such as those provided by
-      # Buildbarn, BuildGrid, or Bazel Buildfarm
-      type: storage
-    - url: https://index.foo.com:11001
-      server-cert: server.crt
-      # "index" remotes store only artifact metadata. This is
-      # currently only provided by the bst-artifact-server
-      type: index
-    # A remote cache from which to upload/download built/prebuilt artifacts
-    - url: https://foo.com:11002
-      push: true
-      server-cert: server.crt
-      client-cert: client.crt
-      client-key: client.key
-      # Caches that support both can omit the type, or set it to "both" -
-      # currently, also only supported by bst-artifact-server
-      type: both
+   To provide write access to downstream users, it is recommended that the
+   required private keys such as the ``client-key`` be provided out of band,
+   and that users configure write access separately in their own
+   :ref:`user configuration <config_cache_servers>`.
 
 
 .. _project_source_cache:
 
 Source cache server
 ~~~~~~~~~~~~~~~~~~~
-Exactly the same as artifact servers, source cache servers can be specified.
+In the same way as artifact cache servers, the project can provide *recommended* source cache
+servers through project configuration using the same semantics as one normally uses in
+:ref:`user configuration <config_cache_servers>`:
 
 .. code:: yaml
 
   #
-  # Source caches
+  # A remote cache from which to download prestaged sources
   #
   source-caches:
-    # A remote cache from which to download prestaged sources
-    - url: https://foo.com:11001
-      server.cert: server.crt
-    # A remote cache from which to upload/download prestaged sources
-    - url: https://foo.com:11002
-      push: true
+  - url: https://foo.com:11001
+    auth:
       server-cert: server.crt
-      client-cert: client.crt
-      client-key: client.key
-
-.. note::
 
-   Source caches also support "splitting" like :ref:`artifact servers
-   <project_essentials_split_artifacts>`.
-
-
-.. _project_remote_execution:
-
-Remote execution
-~~~~~~~~~~~~~~~~
-BuildStream supports remote execution using the Google Remote Execution API
-(REAPI). A description of how remote execution works is beyond the scope
-of this document, but you can specify a remote server complying with the REAPI
-using the `remote-execution` option:
+.. attention::
 
-.. code:: yaml
+   Unlike user configuration, the filenames provided in the :ref:`auth <config_remote_auth>`
+   configuration block are relative to the :ref:`project directory <format_structure>`.
 
-  remote-execution:
+   It is recommended to include public keys such as the ``server-cert`` along with your
+   project so that downstream users can have automatic read access to your project.
 
-    # A url defining a remote execution server
-    execution-service:
-      url: http://buildserver.example.com:50051
-      instance-name: development-emea-1
-    storage-service:
-      url: https://foo.com:11002/
-      server-cert: server.crt
-      client-cert: client.crt
-      client-key: client.key
-      instance-name: development-emea-1
-    action-cache-service:
-      url: http://bar.action.com:50052
-      instance-name: development-emea-1
-
-storage-service specifies a remote CAS store and the parameters are the
-same as those used to specify an :ref:`artifact server <cache_servers>`.
-
-The action-cache-service specifies where built actions are cached, allowing
-buildstream to check whether an action has already been executed and download it
-if so. This is similar to the artifact cache but REAPI specified, and is
-optional for remote execution to work.
-
-The storage service may be the same endpoint used for artifact
-caching. Remote execution cannot work without push access to the
-storage endpoint though.
-
-Instance name is optional. Instance names separate different shards on
-the same endpoint (url).  You can supply a different instance name for
-`execution-service` and `storage-service`, if needed.  The instance
-name should be given to you by the service provider of each
-service. Not all remote execution and storage services support
-instance names.
-
-The Remote Execution API can be found via https://github.com/bazelbuild/remote-apis.
-
-Remote execution configuration can be also provided in the `user
-configuration <user_config_remote_execution>`.
+   To provide write access to downstream users, it is recommended that the required
+   private keys such as the ``client-key`` be provided to users out of band,
+   and that users configure write access separately in their own
+   :ref:`user configuration <config_cache_servers>`.
 
 
 .. _project_essentials_mirrors:
diff --git a/doc/source/using_config.rst b/doc/source/using_config.rst
index ba38173..c4b4c18 100644
--- a/doc/source/using_config.rst
+++ b/doc/source/using_config.rst
@@ -34,183 +34,421 @@ will be ``~/.config/buildstream.conf``
    any version will fallback to ``$XDG_CONFIG_HOME/buildstream.conf``.
 
 
-Project specific value
-----------------------
-The ``projects`` key can be used to specify project specific configurations,
-the supported configurations on a project wide basis are listed here.
+Remote services
+---------------
+BuildStream can be configured to cooperate with remote caches and
+execution services.
+
+
+.. _config_remote_auth:
+
+Authentication
+~~~~~~~~~~~~~~
+BuildStream supports end-to-end encryption when communicating with remote
+services.
+
+All remote service configuration blocks come with an optional ``auth``
+configuration block which allows one to specify the certificates
+and keys required for encrypted traffic.
+
+See the :ref:`server configuration documentation <server_authentication>` for
+details on how the keys can be generated and managed on the server side.
+
+The ``auth`` configuration block looks like this:
+
+.. code:: yaml
+
+   auth:
+     server-cert: server.crt
+     client-cert: client.crt
+     client-key: client.key
+
+**Attributes:**
+
+* ``server-cert``
+
+  The server certificate is used to decrypt traffic coming from the
+  server.
+
+* ``client-cert``
+
+  The client certificate is used by the remote server to decrypt
+  traffic being uploaded to the server.
+
+  The remote server will have its own copy of this certificate, but the
+  client needs to send this certificate's identity to the server so that
+  the server knows which certificate to use.
+
+* ``client-key``
+
+  The client key is used to encrypt traffic when uploading data
+  to the server.
+
+Normally, only the ``server-cert`` is required to securely *download* data
+from remote cache services, while both the ``client-key`` and ``client-cert``
+are required to securely *upload* data to the server.
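+
+For example, a read-only client configuration (a sketch reusing the certificate
+filename from the example above) only needs the ``server-cert``:
+
+.. code:: yaml
+
+   auth:
+     server-cert: server.crt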
+
+
+.. _config_cache_servers:
+
+Cache servers
+~~~~~~~~~~~~~
+BuildStream supports two types of cache servers, :ref:`source cache servers <config_source_caches>`
+and :ref:`artifact cache servers <config_artifact_caches>`. These services allow you
+to store sources and build artifacts for later reuse, and share them among your
+peers.
+
+.. important::
+
+   **Storing and indexing**
+
+   Cache servers are split into two separate services, the *index* and the *storage*.
+   Sometimes these services are provided by the same server, and sometimes it is desirable
+   to use different cache servers for indexing and storing data.
+
+   In simple setups, it is possible to use the same cache server for the indexing and storing
+   of both sources and artifacts. However, when using :ref:`remote execution <user_config_remote_execution>`
+   it is recommended to use the remote execution build cluster's ``storage-service`` as the *storage*
+   service of your cache servers, which may require setting up your *index* service separately.
+
+   When configuring cache servers, BuildStream requires both storage and indexing capabilities;
+   otherwise, no attempt will be made to fetch or push data to and from cache servers.
+
+Cache server configuration is declared in the following way:
+
+.. code:: yaml
+
+   url: https://cache-server.com/cache:11001
+   instance-name: main
+   type: both
+   push: true
+   auth:
+     server-cert: server.crt
+     client-cert: client.crt
+     client-key: client.key
+
+**Attributes:**
+
+* ``url``
+
+  Indicates the ``http`` or ``https`` url, and optionally the port number,
+  of the cache server.
+
+* ``instance-name``
+
+  Instance names separate different shards on the same endpoint (``url``).
+
+  The instance name is optional, and not all cache server implementations support
+  instance names. The instance name should be given to you by the
+  service provider of each service.
+
+* ``type``
+
+  The type of service you intend to use this cache server for. If unspecified,
+  the default value for this field is ``both``.
+
+  * ``storage``
+
+    Use this cache service for storage.
+
+  * ``index``
+
+    Use this cache service as an index of content which is expected to be
+    present in one or more *storage* services.
+
+  * ``both``
+
+    Use this cache service for both indexing and storing data.
+
+
+* ``push``
+
+  Set this to ``true`` if you intend to upload data to this cache server.
+
+  Normally this requires additional credentials in the ``auth`` field.
+
+* ``auth``
+
+  The :ref:`authentication attributes <config_remote_auth>` to connect to
+  this server.
+
+
+.. _config_cache_server_list:
+
+Cache server lists
+''''''''''''''''''
+Cache servers are always specified as *lists* in the configuration. This allows
+*index* and *storage* services to be declared separately, and also allows for
+some redundancy.
+
+**Example:**
+
+.. code:: yaml
+
+   - url: https://cache-server-1.com/index
+     type: index
+   - url: https://cache-server-1.com/storage
+     type: storage
+   - url: https://cache-server-2.com
+     type: both
+
+When downloading data from a cache server, BuildStream will iterate over each
+*index* service one by one until it finds the reference to the data it is looking
+for, and then it will iterate over each *storage* service one by one, downloading
+the referenced data until all data is downloaded.
+
+When uploading data to a cache server, BuildStream will first upload the data to
+each *storage* service which was configured with the ``push`` attribute, and
+upon successful upload, it will proceed to upload the references to the uploaded
+data to each *index* service in the list.
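+
+For example, to upload to the split setup above, one might mark both the *index*
+and the *storage* services as push remotes (a sketch reusing the hypothetical
+URLs from the previous example):
+
+.. code:: yaml
+
+   - url: https://cache-server-1.com/index
+     type: index
+     push: true
+   - url: https://cache-server-1.com/storage
+     type: storage
+     push: true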
 
-.. _config_artifacts:
 
-Artifact server
-~~~~~~~~~~~~~~~
-Although project's often specify a :ref:`remote artifact cache <cache_servers>`
-in their ``project.conf``, you may also want to specify extra caches.
+.. _config_artifact_caches:
 
-Assuming that your host/server is reachable on the internet as ``artifacts.com``
-(for example), there are two ways to declare remote caches in your user
-configuration:
+Artifact cache servers
+~~~~~~~~~~~~~~~~~~~~~~
+Using artifact :ref:`cache servers <config_cache_servers>` is an essential means of
+*build avoidance*, as it will allow you to avoid building an element which has already
+been built and uploaded to a common artifact server.
 
-1. Adding global caches:
+Artifact cache servers can be declared in three different ways, with differing
+priorities.
+
+
+Global caches
+'''''''''''''
+To declare the global artifact server list, use the ``artifacts`` key at the
+toplevel of the user configuration.
 
 .. code:: yaml
 
    #
-   # Artifacts
+   # Configure a global artifact server for pushing and pulling artifacts
    #
    artifacts:
-     # Add a cache to pull from
-     - url: https://artifacts.com/artifacts:11001
-       server-cert: server.crt
-     # Add a cache to push/pull to/from
-     - url: https://artifacts.com/artifacts:11002
+   - url: https://artifacts.com/artifacts:11001
+     push: true
+     auth:
        server-cert: server.crt
        client-cert: client.crt
        client-key: client.key
-       push: true
-     # Add another cache to pull from
-     - url: https://anothercache.com/artifacts:8080
-       server-cert: another_server.crt
 
-.. note::
-
-    Caches declared here will be used by **all** BuildStream project's on the user's
-    machine and are considered a lower priority than those specified in the project
-    configuration.
 
+Project overrides
+'''''''''''''''''
+To declare artifact server lists for individual projects, declare them
+in the :ref:`project specific section <user_config_project_overrides>` of
+the user configuration.
 
-2. Specifying caches for a specific project within the user configuration:
+Artifact server lists declared in this section will only be used for
+elements belonging to the specified project, and will be used instead of
+artifact cache servers declared in the global caches.
 
 .. code:: yaml
 
+   #
+   # Configure an artifact server for pushing and pulling artifacts from project "foo"
+   #
    projects:
-     project-name:
+     foo:
        artifacts:
-         # Add a cache to pull from
-         - url: https://artifacts.com/artifacts:11001
-           server-cert: server.crt
-         # Add a cache to push/pull to/from
-         - url: https://artifacts.com/artifacts:11002
+       - url: https://artifacts.com/artifacts:11001
+         push: true
+         auth:
            server-cert: server.crt
            client-cert: client.crt
            client-key: client.key
-           push: true
-         # Add another cache to pull from
-         - url: https://ourprojectcache.com/artifacts:8080
-           server-cert: project_server.crt
 
 
-.. note::
+Project recommendations
+'''''''''''''''''''''''
+Projects can :ref:`recommend artifact cache servers <project_artifact_cache>` in their
+individual project configuration files.
 
-    Caches listed here will be considered a higher priority than those specified
-    by the project. Furthermore, for a given list of URLs, earlier entries will
-    have higher priority.
+These will only be used for elements belonging to their respective projects, and
+are the lowest priority configuration.
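+
+For example, such a recommendation in a project's ``project.conf`` might look
+like this (a sketch; the URL and certificate filename are illustrative, and the
+certificate path is relative to the project directory):
+
+.. code:: yaml
+
+   artifacts:
+   - url: https://artifacts.com/artifacts:11001
+     auth:
+       server-cert: server.crt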
 
 
-Notice that the use of different ports for the same server distinguishes between
-pull only access and push/pull access. For information regarding this and the
-server/client certificates and keys, please see:
-:ref:`Key pair for the server <server_authentication>`.
+.. _config_source_caches:
 
-.. _config_sources:
+Source cache servers
+~~~~~~~~~~~~~~~~~~~~
+Using source :ref:`cache servers <config_cache_servers>` enables BuildStream to cache
+source code referred to by your project and share those sources with peers who have
+access to the same source cache server.
 
-Source cache server
-~~~~~~~~~~~~~~~~~~~
-Similarly global and project specific source caches servers can be specified in
-the user configuration.
+This can optimize your build times when an element needs to be rebuilt because of
+changes in the dependency graph: BuildStream will first attempt to download the
+source code from the cache server before falling back to obtaining it from an
+external source, which may suffer higher latencies.
+
+Source cache servers can be declared in three different ways, with differing
+priorities.
 
-1. Global source caches
+
+Global caches
+'''''''''''''
+To declare the global source cache server list, use the ``source-caches`` key at the
+toplevel of the user configuration.
 
 .. code:: yaml
 
    #
-   # Source caches
+   # Configure a global source cache server for pushing and pulling sources
    #
    source-caches:
-     # Add a cache to pull from
-     - url: https://cache.com/sources:11001
-       server-cert: server.crt
-     # Add a cache to push/pull to/from
-     - url: https://cache.com/sources:11002
+   - url: https://sources.com/sources:11001
+     push: true
+     auth:
        server-cert: server.crt
        client-cert: client.crt
        client-key: client.key
-       push: true
-     # Add another cache to pull from
-     - url: https://anothercache.com/sources:8080
-       server-cert: another_server.crt
 
-2. Project specific source caches
+
+Project overrides
+'''''''''''''''''
+To declare source cache server lists for individual projects, declare them
+in the :ref:`project specific section <user_config_project_overrides>` of
+the user configuration.
+
+Source cache server lists declared in this section will only be used for
+elements belonging to the specified project, and will be used instead of
+source cache servers declared in the global caches.
 
 .. code:: yaml
 
+   #
+   # Configure a source cache server for pushing and pulling sources from project "foo"
+   #
    projects:
-     project-name:
-       artifacts:
-         # Add a cache to pull from
-         - url: https://cache.com/sources:11001
-           server-cert: server.crt
-         # Add a cache to push/pull to/from
-         - url: https://cache.com/sources:11002
+     foo:
+       source-caches:
+       - url: https://sources.com/sources:11001
+         push: true
+         auth:
            server-cert: server.crt
            client-cert: client.crt
            client-key: client.key
-           push: true
-         # Add another cache to pull from
-         - url: https://ourprojectcache.com/sources:8080
-           server-cert: project_server.crt
+
+
+Project recommendations
+'''''''''''''''''''''''
+Projects can :ref:`recommend source cache servers <project_source_cache>` in their
+individual project configuration files.
+
+These will only be used for elements belonging to their respective projects, and
+are the lowest priority configuration.
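+
+For example, such a recommendation in a project's ``project.conf`` might look
+like this (a sketch; the URL and certificate filename are illustrative):
+
+.. code:: yaml
+
+   source-caches:
+   - url: https://sources.com/sources:11001
+     auth:
+       server-cert: server.crt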
+
 
 .. _user_config_remote_execution:
 
 Remote execution
 ~~~~~~~~~~~~~~~~
+BuildStream supports building remotely using the
+`Google Remote Execution API (REAPI) <https://github.com/bazelbuild/remote-apis>`_.
+
+You can configure the remote execution services globally in your user configuration
+using the ``remote-execution`` key, like so:
 
-The configuration for :ref:`remote execution <project_remote_execution>`
-in ``project.conf`` can be provided in the user configuation. The global
-configuration also has a ``pull-artifact-files`` option, which specifies when
-remote execution is being performed whether to pull file blobs of artifacts, or
-just the directory trees required to perform remote builds.
+.. code:: yaml
 
-There is only one remote execution configuration used per project.
+   remote-execution:
+     pull-artifact-files: True
+     execution-service:
+       url: http://execution.fallback.example.com:50051
+       instance-name: main
+     storage-service:
+       url: https://storage.fallback.example.com:11002
+       instance-name: main
+       auth:
+         server-cert: /keys/server.crt
+         client-cert: /keys/client.crt
+         client-key: /keys/client.key
+     action-cache-service:
+       url: http://cache.fallback.example.com:50052
+       instance-name: main
 
-The project overrides will be taken in priority. The global
-configuration will be used as fallback.
+**Attributes:**
 
-1. Global remote execution fallback:
+* ``pull-artifact-files``
 
-.. code:: yaml
+  This determines whether you want the artifacts which were built remotely
+  to be downloaded into the local CAS, so that they are ready for checkout
+  directly after a build completes.
+
+  If this is set to ``false``, then you will need to download the artifacts
+  you intend to use with :ref:`bst artifact checkout <invoking_artifact_checkout>`
+  after your build completes.
+
+* ``execution-service``
+
+  A :ref:`service configuration <user_config_remote_execution_service>` specifying
+  how to connect with the *execution service*; this service is the main controlling
+  entity in a remote execution build cluster.
+
+* ``storage-service``
+
+  A :ref:`service configuration <user_config_remote_execution_service>` specifying
+  how to connect with the *Content Addressable Storage* service; this is where build
+  inputs and outputs are stored on the remote execution build cluster.
+
+  This service is compatible with the *storage* service offered by
+  :ref:`cache servers <config_cache_servers>`.
+
+* ``action-cache-service``
+
+  A :ref:`service configuration <user_config_remote_execution_service>` specifying
+  how to connect with the *action cache*; this service stores information about
+  activities which clients request be performed by workers on the remote execution
+  build cluster, along with the results of completed operations.
+
+  This service is optional in a remote execution build cluster; if your remote
+  execution service provides an action cache, then you should configure it here.
+
+
+.. _user_config_remote_execution_service:
 
-  remote-execution:
-    execution-service:
-      url: http://execution.fallback.example.com:50051
-      instance-name: main
-    storage-service:
-      url: https://storage.fallback.example.com:11002
-      server-cert: /keys/server.crt
-      client-cert: /keys/client.crt
-      client-key: /keys/client.key
-      instance-name: main
-    action-cache-service:
-      url: http://cache.flalback.example.com:50052
-      instance-name: main
-    pull-artifact-files: True
-
-2. Project override:
+Remote execution service configuration
+''''''''''''''''''''''''''''''''''''''
+Each of the distinct services is described by the same configuration block,
+which looks like this:
 
 .. code:: yaml
 
-  projects:
-    some_project:
-      remote-execution:
-        execution-service:
-          url: http://execution.some_project.example.com:50051
-          instance-name: main
-        storage-service:
-          url: http://storage.some_project.example.com:11002
-          instance-name: main
-        action-cache-service:
-          url: http://cache.some_project.example.com:50052
-          instance-name: main
+   url: https://storage.fallback.example.com:11002
+   instance-name: main
+   auth:
+     server-cert: /keys/server.crt
+     client-cert: /keys/client.crt
+     client-key: /keys/client.key
+
+**Attributes:**
+
+* ``url``
+
+  Indicates the ``http`` or ``https`` url, and optionally the port number,
+  of the service.
+
+* ``instance-name``
+
+  The instance name is optional. Instance names separate different shards on
+  the same endpoint (``url``). The instance name should be given to you by the
+  service provider of each service.
+
+  Not all service providers support instance names.
+
+* ``auth``
 
+  The :ref:`authentication attributes <config_remote_auth>` to connect to
+  this server.
+
+
+.. _user_config_project_overrides:
+
+Project specific value
+----------------------
+The ``projects`` key can be used to specify project specific configurations,
+the supported configurations on a project wide basis are listed here.
 
 .. _user_config_strict_mode:
 
diff --git a/doc/source/using_configuring_cache_server.rst b/doc/source/using_configuring_cache_server.rst
index e5755a6..4e9b94d 100644
--- a/doc/source/using_configuring_cache_server.rst
+++ b/doc/source/using_configuring_cache_server.rst
@@ -13,41 +13,13 @@ In addition to the local caches, you can configure one or more remote caches and
 BuildStream will then try to pull a suitable object from one of the remotes,
 falling back to performing a local build or fetching a source if needed.
 
-Configuring BuildStream to use remote caches
---------------------------------------------
-A project will often set up continuous build infrastructure that pushes
-cached objects to a shared cache, so developers working on the project can
-make use of these pre-made objects instead of having to each build the whole
-project locally. The project can declare this cache in its
-project configuration file for :ref:`artifacts <project_essentials_artifacts>`
-and :ref:`sources <project_source_cache>`.
-
-Users can declare additional remote caches in the :ref:`user configuration
-<config_artifacts>`. There are several use cases for this: your project may not
-define its own cache, it may be useful to have a local mirror of its cache, or
-you may have a reason to share artifacts privately.
-
-Remote caches are identified by their URL. There are currently two supported
-protocols:
-
-* ``http``: Pull and push access, without transport-layer security
-* ``https``: Pull and push access, with transport-layer security
-
-BuildStream allows you to configure as many caches as you like, and will query
-them in a specific order:
-
-1. Project-specific overrides in the user config
-2. Project configuration
-3. User configuration
-
-When an an object is created locally, BuildStream will try to push it to all the
-caches which have the ``push: true`` flag set. You can also manually push
-artifacts to a specific cache using the :ref:`bst artifact push command
-<invoking_artifact_push>`.
-
-Objects are identified using the element or sources :ref:`cache key <cachekeys>`
-so the objects provided by a cache should be interchangable with those provided
-by any other cache.
+On the client side, cache servers are declared and configured in
+:ref:`user configuration <config_cache_servers>`. Since it is typical
+for projects to maintain their own cache servers, projects may also provide
+recommended :ref:`artifact cache servers <project_artifact_cache>`
+and :ref:`source cache servers <project_source_cache>` through project
+configuration, so that downstream users can download from services
+provided by upstream projects by default.
 
 
 Setting up a remote cache
@@ -173,7 +145,9 @@ Instance with push and requiring client authentication:
    For this scenario, you can add the `--index-only` flag to the above
    commands, and configure BuildStream to store artifact metadata and
    files in a separate caches (e.g. bst-artifact-server and Buildbarn)
-   using :ref:`"types" <project_essentials_split_artifacts>`.
+   using the ``type`` attribute of a :ref:`cache server <config_cache_servers>`
+   configuration.
+
 
 Managing the cache with systemd
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -260,14 +234,15 @@ We can then check if the services are successfully running with:
 For more information on systemd services see: 
 `Creating Systemd Service Files <https://www.devdungeon.com/content/creating-systemd-service-files>`_.
 
+
 Declaring remote caches
 ~~~~~~~~~~~~~~~~~~~~~~~
 Remote caches can be declared within either:
 
-1. The project configuration for :ref:`artifact <project_essentials_artifacts>`
-   and :ref:`sources <project_source_cache>`, or
-2. The user configuration for :ref:`artifacts <config_artifacts>` and
-   :ref:`sources <config_sources>`.
+1. The user configuration for :ref:`artifacts <config_artifact_caches>` and
+   :ref:`sources <config_source_caches>`.
+2. The project configuration for :ref:`artifacts <project_artifact_cache>`
+   and :ref:`sources <project_source_cache>`.
 
 Please follow the above links to see examples showing how we declare remote
-caches in both the project configuration and the user configuration, respectively.
+caches in both the user configuration and the project configuration, respectively.
diff --git a/src/buildstream/plugins/elements/stack.py b/src/buildstream/plugins/elements/stack.py
index bd914ed..bdc9096 100644
--- a/src/buildstream/plugins/elements/stack.py
+++ b/src/buildstream/plugins/elements/stack.py
@@ -76,7 +76,7 @@ To accomplish this, you will need to know the cache key of the stack element
 which was built remotely, possibly by inspecting the remote build log or by
 deriving it with an equally configured BuildStream project, and you will
 need read access to the artifact cache server which the build was uploaded to,
-this should be configured in your :ref:`user configuration file <config_artifacts>`.
+this should be configured in your :ref:`user configuration file <config_artifact_caches>`.
 
 You can then checkout the remotely built stack using the
 :ref:`bst artifact checkout <invoking_artifact_checkout>` command and providing