Posted to commits@buildstream.apache.org by ro...@apache.org on 2020/12/29 13:45:36 UTC

[buildstream] 03/06: Add `bst push --remote` and `bst pull --remote`

This is an automated email from the ASF dual-hosted git repository.

root pushed a commit to branch tristan/multiple-caches
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 536c7e04326d78b213d1f522f5207a63dab59cbc
Author: Sam Thursfield <sa...@codethink.co.uk>
AuthorDate: Tue Dec 5 18:48:16 2017 +0000

    Add `bst push --remote` and `bst pull --remote`
    
    This allows pushing and pulling from a specific cache, ignoring what is
    configured.
    
    If we choose to add a --remote option to `bst build` in future that
    would now be simple to do.
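    
    For example, pulling from and then pushing to a specific cache looks
    like this (the cache URL and element name are placeholders, not real
    values):
    
        # placeholder URL and element name, for illustration only
        bst pull --remote https://artifacts.example.com/cache hello.bst
        bst push --remote https://artifacts.example.com/cache hello.bst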
---
 buildstream/_frontend/main.py | 30 +++++++++++++++++++++---------
 buildstream/_pipeline.py      | 25 +++++++++++++++++++------
 2 files changed, 40 insertions(+), 15 deletions(-)

diff --git a/buildstream/_frontend/main.py b/buildstream/_frontend/main.py
index 117d148..021d2e2 100644
--- a/buildstream/_frontend/main.py
+++ b/buildstream/_frontend/main.py
@@ -216,7 +216,7 @@ def build(app, elements, all, track, track_save, track_all, track_except):
         track = elements
 
     app.initialize(elements, except_=track_except, rewritable=track_save)
-    app.pipeline.initialize(use_remote_cache=True, inconsistent=track)
+    app.pipeline.initialize(use_configured_remote_caches=True, inconsistent=track)
     app.print_heading()
     try:
         app.pipeline.build(app.scheduler, all, track, track_save)
@@ -320,12 +320,18 @@ def track(app, elements, deps, except_):
 @click.option('--deps', '-d', default='none',
               type=click.Choice(['none', 'all']),
               help='The dependency artifacts to pull (default: none)')
+@click.option('--remote', '-r',
+              help="The URL of the remote cache (defaults to the first configured cache)")
 @click.argument('elements', nargs=-1,
                 type=click.Path(dir_okay=False, readable=True))
 @click.pass_obj
-def pull(app, elements, deps):
+def pull(app, elements, deps, remote):
     """Pull a built artifact from the configured remote artifact cache.
 
+    By default the artifact will be pulled from one of the configured caches
+    if possible, following the usual priority order. If the `--remote` flag
+    is given, only the specified cache will be queried.
+
     Specify `--deps` to control which artifacts to pull:
 
     \b
@@ -333,7 +339,8 @@ def pull(app, elements, deps):
         all:   All dependencies
     """
     app.initialize(elements)
-    app.pipeline.initialize(use_remote_cache=True)
+    app.pipeline.initialize(use_configured_remote_caches=(remote is None),
+                            add_remote_cache=remote)
     try:
         to_pull = app.pipeline.deps_elements(deps)
         app.pipeline.pull(app.scheduler, to_pull)
@@ -351,11 +358,16 @@ def pull(app, elements, deps):
 @click.option('--deps', '-d', default='none',
               type=click.Choice(['none', 'all']),
               help='The dependencies to push (default: none)')
+@click.option('--remote', '-r', default=None,
+              help="The URL of the remote cache (defaults to the first configured cache)")
 @click.argument('elements', nargs=-1,
                 type=click.Path(dir_okay=False, readable=True))
 @click.pass_obj
-def push(app, elements, deps):
-    """Push a built artifact to the configured remote artifact cache.
+def push(app, elements, deps, remote):
+    """Push a built artifact to a remote artifact cache.
+
+    The default destination is the highest-priority configured cache. You can
+    override this by passing a different cache URL with the `--remote` flag.
 
     Specify `--deps` to control which artifacts to push:
 
@@ -364,7 +376,8 @@ def push(app, elements, deps):
         all:   All dependencies
     """
     app.initialize(elements)
-    app.pipeline.initialize(use_remote_cache=True)
+    app.pipeline.initialize(use_configured_remote_caches=(remote is None),
+                            add_remote_cache=remote)
     try:
         to_push = app.pipeline.deps_elements(deps)
         app.pipeline.push(app.scheduler, to_push)
@@ -444,7 +457,7 @@ def show(app, elements, deps, except_, order, format, downloadable):
             $'---------- %{name} ----------\\n%{vars}'
     """
     app.initialize(elements, except_=except_)
-    app.pipeline.initialize(use_remote_cache=downloadable)
+    app.pipeline.initialize(use_configured_remote_caches=downloadable)
     try:
         dependencies = app.pipeline.deps_elements(deps)
     except PipelineError as e:
@@ -780,8 +793,7 @@ class App():
     #
     # Initialize the main pipeline
     #
-    def initialize(self, elements, except_=tuple(), rewritable=False,
-                   inconsistent=False, use_remote_cache=False):
+    def initialize(self, elements, except_=tuple(), rewritable=False):
 
         profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in elements))
 
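
The frontend changes above reduce to a simple rule: an explicit `--remote`
replaces the configured caches for that invocation; otherwise the configured
caches are used. A minimal sketch of that decision follows (the helper name is
made up for illustration; the real commands pass the keywords directly):

    # Sketch only: how pull/push translate the --remote option into
    # Pipeline.initialize() keyword arguments. remote_cache_kwargs() is a
    # hypothetical helper, not part of the actual frontend.
    def remote_cache_kwargs(remote):
        return {
            # Fall back to the user/project configured caches only when no
            # explicit remote was given on the command line.
            'use_configured_remote_caches': remote is None,
            'add_remote_cache': remote,
        }
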
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 5f0c7e4..dfcd56a 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -94,7 +94,13 @@ class Planner():
 #                         current source refs will not be the effective refs.
 #    rewritable (bool): Whether the loaded files should be rewritable
 #                       this is a bit more expensive due to deep copies
-#    use_remote_cache (bool): Whether to connect with remote artifact cache
+#    use_configured_remote_caches (bool): Whether to connect to configured artifact remotes.
+#    add_remote_cache (str): Adds an additional artifact remote URL, which is
+#                            prepended to the list of remotes (and thus given highest priority).
+#
+# The ticker methods will be called with an element name for each tick; a final
+# tick with None as the argument signals that processing of this stage has
+# terminated.
 #
 # Raises:
 #    LoadError
@@ -137,7 +143,8 @@ class Pipeline():
         self.targets = resolved_elements[:len(targets)]
         self.exceptions = resolved_elements[len(targets):]
 
-    def initialize(self, use_remote_cache=False, inconsistent=None):
+    def initialize(self, use_configured_remote_caches=False,
+                   add_remote_cache=None, inconsistent=None):
         # Preflight directly, before ever interrogating caches or
         # anything.
         self.preflight()
@@ -146,8 +153,15 @@ class Pipeline():
 
         self.initialize_workspaces()
 
-        if use_remote_cache:
-            self.initialize_remote_caches()
+        # Initialize remote artifact caches. We allow the command line to override
+        # the user config in some cases (for example `bst push --remote=...`).
+        artifact_urls = []
+        if add_remote_cache:
+            artifact_urls += [add_remote_cache]
+        if use_configured_remote_caches:
+            artifact_urls += configured_artifact_cache_urls(self.context, self.project)
+        if len(artifact_urls) > 0:
+            self.initialize_remote_caches(artifact_urls)
 
         self.resolve_cache_keys(inconsistent)
 
@@ -166,12 +180,11 @@ class Pipeline():
 
                 self.project._set_workspace(element, source, workspace)
 
-    def initialize_remote_caches(self):
+    def initialize_remote_caches(self, artifact_urls):
         def remote_failed(url, error):
             self.message(MessageType.WARN, "Failed to fetch remote refs from {}: {}\n".format(url, error))
 
         with self.timed_activity("Initializing remote caches", silent_nested=True):
-            artifact_urls = configured_artifact_cache_urls(self.context, self.project)
             self.artifacts.set_remotes(artifact_urls, on_failure=remote_failed)
 
     def resolve_cache_keys(self, inconsistent):
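
The new `Pipeline.initialize()` assembles the list of artifact cache URLs
before handing it to `initialize_remote_caches()`, and the ordering is what
gives an explicit `--remote` the highest priority. A standalone sketch of that
logic (function and argument names are illustrative, not the actual API):

    # Sketch only: assemble artifact cache URLs in priority order.
    # select_artifact_urls() is a hypothetical stand-in for the inline
    # logic in Pipeline.initialize().
    def select_artifact_urls(explicit_remote, configured_urls, use_configured):
        urls = []
        if explicit_remote:
            # An explicit --remote URL is prepended, so it is tried first.
            urls.append(explicit_remote)
        if use_configured:
            # Then any caches from user/project configuration, in their
            # configured priority order.
            urls.extend(configured_urls)
        return urls

    # For `bst push --remote=...`, use_configured is False, so only the
    # explicit URL is used:
    #   select_artifact_urls('https://cache.example.com', ['https://a'], False)
    #   -> ['https://cache.example.com']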