Posted to commits@buildstream.apache.org by no...@apache.org on 2020/12/29 12:57:30 UTC

[buildstream] 02/03: cli.py: Extend dependency support for push

This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/deps_for_push
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit bbb5704d8776fc6f92a4776faabbc6352114d744
Author: James Ennis <ja...@codethink.co.uk>
AuthorDate: Thu Aug 29 10:55:52 2019 +0100

    cli.py: Extend dependency support for push
    
    This patch extends the --deps option of `bst artifact push`
    so that build and runtime dependencies can now be pushed as well.
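    
    For illustration, the extended option can be used as follows (a usage
    sketch only; the element name is borrowed from the tests below, and the
    behaviour shown matches what those tests assert):
    
        # Push only the element's own artifact (default: --deps none)
        bst artifact push checkout-deps.bst
    
        # Push the element together with all of its runtime dependencies
        bst artifact push --deps run checkout-deps.bst
    
        # Push only the element's build dependencies
        bst artifact push --deps build checkout-deps.bst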
---
 src/buildstream/_frontend/cli.py |   4 +-
 tests/frontend/push.py           | 161 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 164 insertions(+), 1 deletion(-)

diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index e78f7b4..40d73e3 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -1173,7 +1173,7 @@ def artifact_pull(app, elements, deps, remote):
 ##################################################################
 @artifact.command(name="push", short_help="Push a built artifact")
 @click.option('--deps', '-d', default='none',
-              type=click.Choice(['none', 'all']),
+              type=click.Choice(['none', 'all', 'build', 'run']),
               help='The dependencies to push (default: none)')
 @click.option('--remote', '-r', default=None,
               help="The URL of the remote cache (defaults to the first configured cache)")
@@ -1202,6 +1202,8 @@ def artifact_push(app, artifacts, deps, remote):
     \b
         none:  No dependencies, just the element itself
         all:   All dependencies
+        run:   All runtime dependencies
+        build: All build dependencies
     """
     with app.initialized(session_name="Push"):
         ignore_junction_targets = False
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 4f0fa3c..43a3a91 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -285,6 +285,167 @@ def test_push_all(cli, tmpdir, datafiles):
         assert_shared(cli, share, project, 'import-dev.bst')
         assert_shared(cli, share, project, 'compose-all.bst')
 
+# Tests that `bst artifact push --deps run` pushes all runtime dependencies
+# of the given element.
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_runtime_deps(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    element = "checkout-deps.bst"
+    build_dep = "import-dev.bst"
+    runtime_dep = "import-bin.bst"
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', element])
+        result.assert_success()
+
+        # Assert that we are now cached locally
+        for e in [element, build_dep, runtime_dep]:
+            assert cli.get_element_state(project, e) == 'cached'
+
+        # Configure artifact share
+        cli.configure({
+            #
+            # FIXME: This test hangs "sometimes" if we allow
+            #        concurrent push.
+            #
+            #        It's not too bad to ignore since we're
+            #        using the local artifact cache functionality
+            #        only, but it should probably be fixed.
+            #
+            'scheduler': {
+                'pushers': 1
+            },
+            'artifacts': {
+                'url': share.repo,
+                'push': True,
+            }
+        })
+
+        # Now push the element along with its runtime dependencies
+        result = cli.run(project=project, args=[
+            'artifact', 'push', element,
+            '--deps', 'run'
+        ])
+        result.assert_success()
+
+        # Finally, assert that the element and its runtime dep are shared, but not its build dep
+        assert_shared(cli, share, project, element)
+        assert_shared(cli, share, project, runtime_dep)
+        assert_not_shared(cli, share, project, build_dep)
+
+
+# Tests that `bst artifact push --deps build` pushes build dependencies
+# of the given element.
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_build_deps(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    element = "checkout-deps.bst"
+    build_dep = "import-dev.bst"
+    runtime_dep = "import-bin.bst"
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', element])
+        result.assert_success()
+
+        # Assert that we are now cached locally
+        for e in [element, build_dep, runtime_dep]:
+            assert cli.get_element_state(project, e) == 'cached'
+
+        # Configure artifact share
+        cli.configure({
+            #
+            # FIXME: This test hangs "sometimes" if we allow
+            #        concurrent push.
+            #
+            #        It's not too bad to ignore since we're
+            #        using the local artifact cache functionality
+            #        only, but it should probably be fixed.
+            #
+            'scheduler': {
+                'pushers': 1
+            },
+            'artifacts': {
+                'url': share.repo,
+                'push': True,
+            }
+        })
+
+        # Now push the element's build dependencies with --deps build
+        result = cli.run(project=project, args=[
+            'artifact', 'push', element,
+            '--deps', 'build'
+        ])
+        result.assert_success()
+
+        # Finally, assert that only the build dependency is in the share
+        assert_not_shared(cli, share, project, element)
+        assert_not_shared(cli, share, project, runtime_dep)
+        assert_shared(cli, share, project, build_dep)
+
+
+# Tests `bst artifact push --deps build $artifact_ref`
+@pytest.mark.datafiles(DATA_DIR)
+def test_push_artifacts_build_deps(cli, tmpdir, datafiles):
+    project = str(datafiles)
+    element = 'checkout-deps.bst'
+    build_dep = 'import-dev.bst'
+    runtime_dep = 'import-bin.bst'
+
+    # Configure a local cache
+    local_cache = os.path.join(str(tmpdir), 'cache')
+    cli.configure({'cachedir': local_cache})
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', element])
+        result.assert_success()
+
+        # Assert that the *artifact* is cached locally
+        cache_key = cli.get_element_key(project, element)
+        artifact_ref = os.path.join('test', os.path.splitext(element)[0], cache_key)
+        assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', artifact_ref))
+
+        build_dep_cache_key = cli.get_element_key(project, build_dep)
+        build_dep_artifact_ref = os.path.join('test', os.path.splitext(build_dep)[0], build_dep_cache_key)
+        assert os.path.exists(os.path.join(local_cache, 'artifacts', 'refs', build_dep_artifact_ref))
+
+        # Configure artifact share
+        cli.configure({
+            #
+            # FIXME: This test hangs "sometimes" if we allow
+            #        concurrent push.
+            #
+            #        It's not too bad to ignore since we're
+            #        using the local artifact cache functionality
+            #        only, but it should probably be fixed.
+            #
+            'scheduler': {
+                'pushers': 1
+            },
+            'artifacts': {
+                'url': share.repo,
+                'push': True,
+            }
+        })
+
+        # Now push the artifact ref's build dependencies with --deps build
+        result = cli.run(project=project, args=[
+            'artifact', 'push', '--deps', 'build', artifact_ref
+        ])
+        result.assert_success()
+
+        # Finally, assert that only the build dependency is in the share
+        #
+        # Note that assert_shared() checks that an element is shared by obtaining
+        # its artifact ref and asserting that the path exists in the share
+        assert_not_shared(cli, share, project, element)
+        assert_not_shared(cli, share, project, runtime_dep)
+        assert_shared(cli, share, project, build_dep)
+
+
 # Tests that `bst artifact push --deps run $artifact_ref` fails
 @pytest.mark.datafiles(DATA_DIR)
 def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):