You are viewing a plain text version of this content; the canonical hyperlink was not preserved in this conversion.
Posted to commits@buildstream.apache.org by gi...@apache.org on 2020/12/29 13:14:29 UTC

[buildstream] branch jonathan/mirror-client-sourcedownloader-tidy created (now 6696a2e)

This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a change to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git.


      at 6696a2e  patch.py: Fix use of alias overrides

This branch includes the following new commits:

     new 105e102  project: Parse and store mirrors with default mirror set via config or command-line
     new c4294e2  Move _ALIAS_SEPARATOR into utils
     new d02bfe0  Allow translate_url to be overridden with a different alias in Project and Source
     new d417d7e  source.py: Use mirrors when fetching and tracking
     new d5ee579  source.py: Ensure call() and check_output() return SourceErrors
     new a18e637  testutils: Add a helper to copy a testutils repo
     new 8491199  testutils: Make tar and zip sources copy successfully
     new 8c65bf6  tests: Add mirrored fetching and tracking tests
     new 6bb1bb2  loader.py: Fix use of wrong fetch method
     new e971f64  _downloadablefilesource.py: Update to use alias overrides in fetch and track
     new f1eea24  bzr.py: Adapt to use of alias overrides
     new 772174e  git.py: Use SourceDownloaders and alias_overrides
     new afa5c03  tar.py: Update to handle alias overrides
     new 7c69e49  local.py: Update to handle SourceDownloaders and alias overrides
     new 97f3408  ostree.py: Adapt to use of alias overrides
     new 6696a2e  patch.py: Fix use of alias overrides

The 16 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[buildstream] 05/16: source.py: Ensure call() and check_output() return SourceErrors

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit d5ee579c6d99ec578d17f1d5921cf751543123ba
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Mon Jun 11 10:43:08 2018 +0100

    source.py: Ensure call() and check_output() return SourceErrors
    
    They previously returned PluginErrors, inherited from Plugin.
    
    This made it hard to catch them from within source plugins, as they
    don't have access to PluginError.
---
 buildstream/source.py | 88 ++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 87 insertions(+), 1 deletion(-)

diff --git a/buildstream/source.py b/buildstream/source.py
index 5c32b4e..6e8a99f 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -74,7 +74,7 @@ from contextlib import contextmanager
 
 from . import Plugin
 from . import _yaml, utils
-from ._exceptions import BstError, ImplError, ErrorDomain
+from ._exceptions import BstError, ImplError, ErrorDomain, PluginError
 from ._projectrefs import ProjectRefStorage
 
 
@@ -393,6 +393,92 @@ class Source(Plugin, SourceDownloader):
         """
         return [self]
 
+    def call(self, *popenargs, fail=None, **kwargs):
+        """A wrapper for subprocess.call()
+
+        Args:
+           popenargs (list): Popen() arguments
+           fail (str): A message to display if the process returns
+                       a non zero exit code
+           rest_of_args (kwargs): Remaining arguments to subprocess.call()
+
+        Returns:
+           (int): The process exit code.
+
+        Raises:
+           (:class:`.PluginError`): If a non-zero return code is received and *fail* is specified
+
+        Note: If *fail* is not specified, then the return value of subprocess.call()
+              is returned even on error, and no exception is automatically raised.
+
+        **Example**
+
+        .. code:: python
+
+          # Call some host tool
+          self.tool = utils.get_host_tool('toolname')
+          self.call(
+              [self.tool, '--download-ponies', self.mirror_directory],
+              "Failed to download ponies from {}".format(
+                  self.mirror_directory))
+        """
+        try:
+            return super().call(*popenargs, fail=fail, **kwargs)
+        except PluginError as e:
+            raise SourceError("{}: {}".format(self, e),
+                              detail=e.detail, reason=e.reason) from e
+
+    def check_output(self, *popenargs, fail=None, **kwargs):
+        """A wrapper for subprocess.check_output()
+
+        Args:
+           popenargs (list): Popen() arguments
+           fail (str): A message to display if the process returns
+                       a non zero exit code
+           rest_of_args (kwargs): Remaining arguments to subprocess.call()
+
+        Returns:
+           (int): The process exit code
+           (str): The process standard output
+
+        Raises:
+           (:class:`.PluginError`): If a non-zero return code is received and *fail* is specified
+
+        Note: If *fail* is not specified, then the return value of subprocess.check_output()
+              is returned even on error, and no exception is automatically raised.
+
+        **Example**
+
+        .. code:: python
+
+          # Get the tool at preflight time
+          self.tool = utils.get_host_tool('toolname')
+
+          # Call the tool, automatically raise an error
+          _, output = self.check_output(
+              [self.tool, '--print-ponies'],
+              "Failed to print the ponies in {}".format(
+                  self.mirror_directory),
+              cwd=self.mirror_directory)
+
+          # Call the tool, inspect exit code
+          exit_code, output = self.check_output(
+              [self.tool, 'get-ref', tracking],
+              cwd=self.mirror_directory)
+
+          if exit_code == 128:
+              return
+          elif exit_code != 0:
+              fmt = "{plugin}: Failed to get ref for tracking: {track}"
+              raise SourceError(
+                  fmt.format(plugin=self, track=tracking)) from e
+        """
+        try:
+            return super().check_output(*popenargs, fail=fail, **kwargs)
+        except PluginError as e:
+            raise SourceError("{}: {}".format(self, e),
+                              detail=e.detail, reason=e.reason) from e
+
     #############################################################
     #            Private Methods used in BuildStream            #
     #############################################################


[buildstream] 02/16: Move _ALIAS_SEPARATOR into utils

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit c4294e24640a360dca9d40dd7ea9f09c759dae95
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Mon Apr 9 16:18:27 2018 +0100

    Move _ALIAS_SEPARATOR into utils
    
    The separator is useful in source files other than _project.py
---
 buildstream/_project.py | 7 ++-----
 buildstream/utils.py    | 4 ++++
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/buildstream/_project.py b/buildstream/_project.py
index 188bf27..110aeec 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -36,9 +36,6 @@ from ._projectrefs import ProjectRefs, ProjectRefStorage
 from ._versions import BST_FORMAT_VERSION
 
 
-# The separator we use for user specified aliases
-_ALIAS_SEPARATOR = ':'
-
 # Project Configuration file
 _PROJECT_CONF_FILE = 'project.conf'
 
@@ -137,8 +134,8 @@ class Project():
     # fully qualified urls based on the shorthand which is allowed
     # to be specified in the YAML
     def translate_url(self, url):
-        if url and _ALIAS_SEPARATOR in url:
-            url_alias, url_body = url.split(_ALIAS_SEPARATOR, 1)
+        if url and utils._ALIAS_SEPARATOR in url:
+            url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
             alias_url = self._aliases.get(url_alias)
             if alias_url:
                 url = alias_url + url_body
diff --git a/buildstream/utils.py b/buildstream/utils.py
index 70759dc..cf0123d 100644
--- a/buildstream/utils.py
+++ b/buildstream/utils.py
@@ -43,6 +43,10 @@ from . import _signals
 from ._exceptions import BstError, ErrorDomain
 
 
+# The separator we use for user specified aliases
+_ALIAS_SEPARATOR = ':'
+
+
 class UtilError(BstError):
     """Raised by utility functions when system calls fail.
 


[buildstream] 11/16: bzr.py: Adapt to use of alias overrides

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit f1eea24bdd60d23e319562125e233d72020a1c53
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Apr 13 16:47:18 2018 +0100

    bzr.py: Adapt to use of alias overrides
---
 buildstream/plugins/sources/bzr.py | 33 ++++++++++++++++++++-------------
 1 file changed, 20 insertions(+), 13 deletions(-)

diff --git a/buildstream/plugins/sources/bzr.py b/buildstream/plugins/sources/bzr.py
index b499d49..653d489 100644
--- a/buildstream/plugins/sources/bzr.py
+++ b/buildstream/plugins/sources/bzr.py
@@ -69,7 +69,6 @@ class BzrSource(Source):
         self.original_url = self.node_get_member(node, str, 'url')
         self.tracking = self.node_get_member(node, str, 'track')
         self.ref = self.node_get_member(node, str, 'ref', None)
-        self.url = self.translate_url(self.original_url)
 
     def preflight(self):
         # Check if bzr is installed, get the binary at the same time.
@@ -96,10 +95,11 @@ class BzrSource(Source):
     def set_ref(self, ref, node):
         node['ref'] = self.ref = ref
 
-    def track(self):
-        with self.timed_activity("Tracking {}".format(self.url),
+    def track(self, alias_override=None):
+        url = self.translate_url(self.original_url, alias_override=alias_override)
+        with self.timed_activity("Tracking {}".format(url),
                                  silent_nested=True):
-            self._ensure_mirror()
+            self._ensure_mirror(url, skip_ref_check=True)
             ret, out = self.check_output([self.host_bzr, "version-info",
                                           "--custom", "--template={revno}",
                                           self._get_branch_dir()],
@@ -110,10 +110,11 @@ class BzrSource(Source):
 
             return out
 
-    def fetch(self):
-        with self.timed_activity("Fetching {}".format(self.url),
+    def fetch(self, alias_override=None):
+        url = self.translate_url(self.original_url, alias_override=alias_override)
+        with self.timed_activity("Fetching {}".format(url),
                                  silent_nested=True):
-            self._ensure_mirror()
+            self._ensure_mirror(url)
 
     def stage(self, directory):
         self.call([self.host_bzr, "checkout", "--lightweight",
@@ -123,7 +124,10 @@ class BzrSource(Source):
                   .format(self.ref, self._get_branch_dir(), directory))
 
     def init_workspace(self, directory):
-        url = os.path.join(self.url, self.tracking)
+        # XXX: init_workspace points the branch at the upstream URL
+        # Even if mirrors exist.
+        url = self.translate_url(self.original_url)
+        branch_url = os.path.join(url, self.tracking)
         with self.timed_activity('Setting up workspace "{}"'.format(directory), silent_nested=True):
             # Checkout from the cache
             self.call([self.host_bzr, "branch",
@@ -134,8 +138,8 @@ class BzrSource(Source):
                       .format(self.ref, self._get_branch_dir(), directory))
             # Switch the parent branch to the source's origin
             self.call([self.host_bzr, "switch",
-                       "--directory={}".format(directory), url],
-                      fail="Failed to switch workspace's parent branch to {}".format(url))
+                       "--directory={}".format(directory), branch_url],
+                      fail="Failed to switch workspace's parent branch to {}".format(branch_url))
 
     def _check_ref(self):
         # If the mirror doesnt exist yet, then we dont have the ref
@@ -209,7 +213,7 @@ class BzrSource(Source):
             yield repodir
             self._atomic_replace_mirrordir(repodir)
 
-    def _ensure_mirror(self):
+    def _ensure_mirror(self, url, skip_ref_check=False):
         with self._atomic_repodir() as repodir:
             # Initialize repo if no metadata
             bzr_metadata_dir = os.path.join(repodir, ".bzr")
@@ -218,18 +222,21 @@ class BzrSource(Source):
                           fail="Failed to initialize bzr repository")
 
             branch_dir = os.path.join(repodir, self.tracking)
+            branch_url = url + "/" + self.tracking
             if not os.path.exists(branch_dir):
                 # `bzr branch` the branch if it doesn't exist
                 # to get the upstream code
-                branch_url = self.url + "/" + self.tracking
                 self.call([self.host_bzr, "branch", branch_url, branch_dir],
                           fail="Failed to branch from {} to {}".format(branch_url, branch_dir))
 
             else:
                 # `bzr pull` the branch if it does exist
                 # to get any changes to the upstream code
-                self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir)],
+                self.call([self.host_bzr, "pull", "--directory={}".format(branch_dir), branch_url],
                           fail="Failed to pull new changes for {}".format(branch_dir))
+        if not skip_ref_check and not self._check_ref():
+            raise SourceError("Failed to ensure ref '{}' was mirrored".format(self.ref),
+                              reason="ref-not-mirrored")
 
 
 def setup():


[buildstream] 09/16: loader.py: Fix use of wrong fetch method

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 6bb1bb211620d87642c84cc689a801d105a25a09
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 17:01:58 2018 +0100

    loader.py: Fix use of wrong fetch method
---
 buildstream/_loader/loader.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 9e4406b..0ae55ca 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -504,7 +504,7 @@ class Loader():
                 if self._fetch_subprojects:
                     if ticker:
                         ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
-                    source.fetch()
+                    source._fetch()
                 else:
                     detail = "Try fetching the project with `bst fetch {}`".format(filename)
                     raise LoadError(LoadErrorReason.SUBPROJECT_FETCH_NEEDED,


[buildstream] 15/16: ostree.py: Adapt to use of alias overrides

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 97f3408bbca0d095dbd50bb9cf0cca12d7a26bd5
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 15:06:59 2018 +0100

    ostree.py: Adapt to use of alias overrides
    
    Because an OSTree repo may have its remote changed, the standard
    behaviour of using OSTree.RemoteRepoChange.ADD_IF_NOT_EXISTS isn't
    sufficient.
---
 buildstream/_ostree.py                |  9 +++++++-
 buildstream/plugins/sources/ostree.py | 40 ++++++++++++++++++-----------------
 2 files changed, 29 insertions(+), 20 deletions(-)

diff --git a/buildstream/_ostree.py b/buildstream/_ostree.py
index e40df5f..6c6fadd 100644
--- a/buildstream/_ostree.py
+++ b/buildstream/_ostree.py
@@ -535,8 +535,15 @@ def configure_remote(repo, remote, url, key_url=None):
         options = vd.end()
 
     try:
+        # If it exists, we want to change it, so ADD_IF_NOT_EXISTS is insufficient
         repo.remote_change(None,      # Optional OSTree.Sysroot
-                           OSTree.RepoRemoteChange.ADD_IF_NOT_EXISTS,
+                           OSTree.RepoRemoteChange.DELETE_IF_EXISTS,
+                           remote,    # Remote name
+                           url,       # Remote url
+                           options,   # Remote options
+                           None)      # Optional Gio.Cancellable
+        repo.remote_change(None,      # Optional OSTree.Sysroot
+                           OSTree.RepoRemoteChange.ADD,
                            remote,    # Remote name
                            url,       # Remote url
                            options,   # Remote options
diff --git a/buildstream/plugins/sources/ostree.py b/buildstream/plugins/sources/ostree.py
index c77b3a7..5b03d90 100644
--- a/buildstream/plugins/sources/ostree.py
+++ b/buildstream/plugins/sources/ostree.py
@@ -67,11 +67,10 @@ class OSTreeSource(Source):
         self.node_validate(node, ['url', 'ref', 'track', 'gpg-key'] + Source.COMMON_CONFIG_KEYS)
 
         self.original_url = self.node_get_member(node, str, 'url')
-        self.url = self.translate_url(self.original_url)
         self.ref = self.node_get_member(node, str, 'ref', None)
         self.tracking = self.node_get_member(node, str, 'track', None)
         self.mirror = os.path.join(self.get_mirror_directory(),
-                                   utils.url_directory_name(self.url))
+                                   utils.url_directory_name(self.original_url))
 
         # (optional) Not all repos are signed. But if they are, get the gpg key
         self.gpg_key = self.node_get_member(node, str, 'gpg-key', None)
@@ -97,32 +96,34 @@ class OSTreeSource(Source):
     def set_ref(self, ref, node):
         node['ref'] = self.ref = ref
 
-    def track(self):
+    def track(self, alias_override=None):
         # If self.tracking is not specified its' not an error, just silently return
         if not self.tracking:
             return None
 
-        self.ensure()
+        url = self.translate_url(self.original_url, alias_override=alias_override)
+        self.ensure(url)
         with self.timed_activity("Fetching tracking ref '{}' from origin: {}"
-                                 .format(self.tracking, self.url)):
+                                 .format(self.tracking, url)):
             try:
                 _ostree.fetch(self.repo, ref=self.tracking, progress=self.progress)
             except OSTreeError as e:
                 raise SourceError("{}: Failed to fetch tracking ref '{}' from origin {}\n\n{}"
-                                  .format(self, self.tracking, self.url, e)) from e
+                                  .format(self, self.tracking, url, e)) from e
 
         return _ostree.checksum(self.repo, self.tracking)
 
-    def fetch(self):
-        self.ensure()
+    def fetch(self, alias_override=None):
+        url = self.translate_url(self.original_url, alias_override=alias_override)
+        self.ensure(url)
         if not _ostree.exists(self.repo, self.ref):
             with self.timed_activity("Fetching remote ref: {} from origin: {}"
-                                     .format(self.ref, self.url)):
+                                     .format(self.ref, url)):
                 try:
                     _ostree.fetch(self.repo, ref=self.ref, progress=self.progress)
                 except OSTreeError as e:
                     raise SourceError("{}: Failed to fetch ref '{}' from origin: {}\n\n{}"
-                                      .format(self, self.ref, self.url, e)) from e
+                                      .format(self, self.ref, url, e)) from e
 
     def stage(self, directory):
         self.ensure()
@@ -132,12 +133,12 @@ class OSTreeSource(Source):
             checkoutdir = os.path.join(tmpdir, 'checkout')
 
             with self.timed_activity("Staging ref: {} from origin: {}"
-                                     .format(self.ref, self.url)):
+                                     .format(self.ref, self.original_url)):
                 try:
                     _ostree.checkout(self.repo, checkoutdir, self.ref, user=True)
                 except OSTreeError as e:
                     raise SourceError("{}: Failed to checkout ref '{}' from origin: {}\n\n{}"
-                                      .format(self, self.ref, self.url, e)) from e
+                                      .format(self, self.ref, self.original_url, e)) from e
 
             # The target directory is guaranteed to exist, here we must move the
             # content of out checkout into the existing target directory.
@@ -151,7 +152,7 @@ class OSTreeSource(Source):
                     shutil.move(source_path, directory)
             except (shutil.Error, OSError) as e:
                 raise SourceError("{}: Failed to move ostree checkout {} from '{}' to '{}'\n\n{}"
-                                  .format(self, self.url, tmpdir, directory, e)) from e
+                                  .format(self, self.original_url, tmpdir, directory, e)) from e
 
     def get_consistency(self):
         if self.ref is None:
@@ -165,19 +166,20 @@ class OSTreeSource(Source):
     #
     # Local helpers
     #
-    def ensure(self):
+    def ensure(self, url=None):
         if not self.repo:
-            self.status("Creating local mirror for {}".format(self.url))
-
+            self.status("Creating local mirror")
             self.repo = _ostree.ensure(self.mirror, True)
+
+        if url:
+            self.status("Configuring local mirror for {}".format(url))
             gpg_key = None
             if self.gpg_key_path:
                 gpg_key = 'file://' + self.gpg_key_path
-
             try:
-                _ostree.configure_remote(self.repo, "origin", self.url, key_url=gpg_key)
+                _ostree.configure_remote(self.repo, "origin", url, key_url=gpg_key)
             except OSTreeError as e:
-                raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, self.url, e)) from e
+                raise SourceError("{}: Failed to configure origin {}\n\n{}".format(self, url, e)) from e
 
     def progress(self, percent, message):
         self.status(message)


[buildstream] 06/16: testutils: Add a helper to copy a testutils repo

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit a18e63716f7d6cc949cc4a48cd68edc6d74715e6
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Apr 13 16:44:13 2018 +0100

    testutils: Add a helper to copy a testutils repo
    
    This is helpful if you want to test what happens when you have one repo
    that has diverged from another. By copying the repo you're sure they
    start with shared history.
    
    This is especially useful when mirroring.
---
 tests/testutils/repo/repo.py | 21 ++++++++++++++++++++-
 1 file changed, 20 insertions(+), 1 deletion(-)

diff --git a/tests/testutils/repo/repo.py b/tests/testutils/repo/repo.py
index 4c9ee59..234aa37 100644
--- a/tests/testutils/repo/repo.py
+++ b/tests/testutils/repo/repo.py
@@ -22,7 +22,7 @@ class Repo():
         # The directory the actual repo will be stored in
         self.repo = os.path.join(self.directory, subdir)
 
-        os.makedirs(self.repo)
+        os.makedirs(self.repo, exist_ok=True)
 
     # create():
     #
@@ -69,3 +69,22 @@ class Repo():
                 shutil.copytree(src_path, dest_path)
             else:
                 shutil.copy2(src_path, dest_path)
+
+    # copy():
+    #
+    # Creates a copy of this repository in the specified
+    # destination.
+    #
+    # Args:
+    #    dest (str): The destination directory
+    #
+    # Returns:
+    #    (Repo): A Repo object for the new repository.
+    def copy(self, dest):
+        subdir = self.repo[len(self.directory):].lstrip(os.sep)
+        new_dir = os.path.join(dest, subdir)
+        os.makedirs(new_dir, exist_ok=True)
+        self.copy_directory(self.repo, new_dir)
+        repo_type = type(self)
+        new_repo = repo_type(dest, subdir)
+        return new_repo


[buildstream] 14/16: local.py: Update to handle SourceDownloaders and alias overrides

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 7c69e4918503522c678623dc30b99cfa1cf5c9b1
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 17:40:40 2018 +0100

    local.py: Update to handle SourceDownloaders and alias overrides
---
 buildstream/plugins/sources/local.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/buildstream/plugins/sources/local.py b/buildstream/plugins/sources/local.py
index 673add1..3ac25d6 100644
--- a/buildstream/plugins/sources/local.py
+++ b/buildstream/plugins/sources/local.py
@@ -88,7 +88,7 @@ class LocalSource(Source):
     def set_ref(self, ref, node):
         pass  # pragma: nocover
 
-    def fetch(self):
+    def fetch(self, alias_override=None):
         # Nothing to do here for a local source
         pass  # pragma: nocover
 
@@ -103,6 +103,10 @@ class LocalSource(Source):
                 destfile = os.path.join(directory, os.path.basename(self.path))
                 utils.safe_copy(self.fullpath, destfile)
 
+    def get_alias(self):
+        # Local sources do not need mirrors
+        return None  # pragma: nocover
+
 
 # Create a unique key for a file
 def unique_key(filename):


[buildstream] 04/16: source.py: Use mirrors when fetching and tracking

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit d417d7ed8265fc214e2a0f183df75da821d8f805
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Thu Apr 12 16:37:28 2018 +0100

    source.py: Use mirrors when fetching and tracking
---
 buildstream/__init__.py |   2 +-
 buildstream/source.py   | 190 ++++++++++++++++++++++++++++++++++++++----------
 2 files changed, 154 insertions(+), 38 deletions(-)

diff --git a/buildstream/__init__.py b/buildstream/__init__.py
index 7c4b5e5..ca90e1b 100644
--- a/buildstream/__init__.py
+++ b/buildstream/__init__.py
@@ -30,7 +30,7 @@ if "_BST_COMPLETION" not in os.environ:
     from .utils import UtilError, ProgramNotFoundError
     from .sandbox import Sandbox, SandboxFlags
     from .plugin import Plugin
-    from .source import Source, SourceError, Consistency
+    from .source import Source, SourceError, Consistency, SourceDownloader
     from .element import Element, ElementError, Scope
     from .buildelement import BuildElement
     from .scriptelement import ScriptElement
diff --git a/buildstream/source.py b/buildstream/source.py
index ee0903d..5c32b4e 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -101,6 +101,70 @@ class Consistency():
     """
 
 
+class SourceDownloader():
+    """SourceDownloader()
+
+    This interface exists so that a source that downloads from multiple
+    places (e.g. a git source with submodules) has a consistent interface for
+    fetching and substituting aliases.
+    """
+
+    def track(self, alias_override=None):
+        """Resolve a new ref from the plugin's track option
+
+        Returns:
+           (simple object): A new internal source reference, or None
+
+        If the backend in question supports resolving references from
+        a symbolic tracking branch or tag, then this should be implemented
+        to perform this task on behalf of ``build-stream track`` commands.
+
+        This usually requires fetching new content from a remote origin
+        to see if a new ref has appeared for your branch or tag. If the
+        backend store allows one to query for a new ref from a symbolic
+        tracking data without downloading then that is desirable.
+
+        See :func:`~buildstream.source.Source.get_ref` for a discussion on
+        the *ref* parameter.
+        """
+        # Allow a non implementation
+        return None
+
+    def fetch(self, alias_override=None):
+        """Fetch remote sources and mirror them locally, ensuring at least
+        that the specific reference is cached locally.
+
+        Raises:
+           :class:`.SourceError`
+
+        Implementors should raise :class:`.SourceError` if the there is some
+        network error or if the source reference could not be matched.
+        """
+        raise ImplError("Source downloader '{}' does not implement fetch()".format(type(self)))
+
+    def get_alias(self):
+        """Retrieves the alias used by this downloader, typically by splitting
+        it off the url
+
+        Note that it offers no guarantees that the alias is handled by the project.
+
+        Returns:
+           (str): The alias used by the SourceDownloader
+        """
+        # Guess that an original_url field exists
+        # If not, the source must implement an alternative way of getting the alias.
+        if hasattr(self, 'original_url'):
+            url = getattr(self, 'original_url')
+            if utils._ALIAS_SEPARATOR in url:
+                alias, _ = url.split(utils._ALIAS_SEPARATOR, 1)
+                return alias
+            else:
+                return None
+        else:
+            raise ImplError("Source downloader '{}' is missing original_url "
+                            "and doesn't implement an alternative".format(type(self)))
+
+
 class SourceError(BstError):
     """This exception should be raised by :class:`.Source` implementations
     to report errors to the user.
@@ -114,7 +178,7 @@ class SourceError(BstError):
         super().__init__(message, detail=detail, domain=ErrorDomain.SOURCE, reason=reason)
 
 
-class Source(Plugin):
+class Source(Plugin, SourceDownloader):
     """Source()
 
     Base Source class.
@@ -135,6 +199,7 @@ class Source(Plugin):
         self.__element_kind = meta.element_kind         # The kind of the element owning this source
         self.__directory = meta.directory               # Staging relative directory
         self.__consistency = Consistency.INCONSISTENT   # Cached consistency state
+        self.__meta = meta                              # MetaSource stored so we can copy this source later.
 
         # Collect the composited element configuration and
         # ask the element to configure itself.
@@ -214,39 +279,6 @@ class Source(Plugin):
         """
         raise ImplError("Source plugin '{}' does not implement set_ref()".format(self.get_kind()))
 
-    def track(self):
-        """Resolve a new ref from the plugin's track option
-
-        Returns:
-           (simple object): A new internal source reference, or None
-
-        If the backend in question supports resolving references from
-        a symbolic tracking branch or tag, then this should be implemented
-        to perform this task on behalf of ``build-stream track`` commands.
-
-        This usually requires fetching new content from a remote origin
-        to see if a new ref has appeared for your branch or tag. If the
-        backend store allows one to query for a new ref from a symbolic
-        tracking data without downloading then that is desirable.
-
-        See :func:`~buildstream.source.Source.get_ref` for a discussion on
-        the *ref* parameter.
-        """
-        # Allow a non implementation
-        return None
-
-    def fetch(self):
-        """Fetch remote sources and mirror them locally, ensuring at least
-        that the specific reference is cached locally.
-
-        Raises:
-           :class:`.SourceError`
-
-        Implementors should raise :class:`.SourceError` if the there is some
-        network error or if the source reference could not be matched.
-        """
-        raise ImplError("Source plugin '{}' does not implement fetch()".format(self.get_kind()))
-
     def stage(self, directory):
         """Stage the sources to a directory
 
@@ -341,6 +373,26 @@ class Source(Plugin):
         with utils._tempdir(dir=mirrordir) as tempdir:
             yield tempdir
 
+    def get_source_downloaders(self, alias_override=None):
+        """Get the objects that are used for downloading
+
+        For sources that don't download from multiple URLs, it's
+        usually enough to just return a list containing itself.
+
+        For sources that do download from multiple URLs, the first
+        entry in the list must be the SourceDownloader that is used
+        for tracking (i.e. the URL points at the repository specified
+        by ref)
+
+        Args:
+           (optional) alias_override (str): A URI to use instead of the
+                                            default alias.
+
+        Returns:
+           list: A list of SourceDownloaders
+        """
+        return [self]
+
     #############################################################
     #            Private Methods used in BuildStream            #
     #############################################################
@@ -372,10 +424,27 @@ class Source(Plugin):
     def _get_consistency(self):
         return self.__consistency
 
-    # Wrapper function around plugin provided fetch method
+    # _fetch():
+    #
+    # Tries to fetch from every mirror, falling back on fetching without
+    # mirrors.
     #
     def _fetch(self):
-        self.fetch()
+        project = self._get_project()
+
+        # Use alias overrides to try and get the list of source downloaders
+        # Because some sources (git) need to be able to fetch to get the
+        # source downloaders
+        alias = self.get_alias()
+        uri_list = project.get_alias_uris(alias)
+        downloaders = self.__iterate_uris(uri_list, self.get_source_downloaders,
+                                          "get source downloaders when fetching")
+
+        for downloader in downloaders:
+            alias = downloader.get_alias()
+            uri_list = project.get_alias_uris(alias)
+            self.__iterate_uris(uri_list, downloader.fetch,
+                                "fetch for mirrors of alias '{}'".format(alias))
 
     # Wrapper for stage() api which gives the source
     # plugin a fully constructed path considering the
@@ -582,7 +651,7 @@ class Source(Plugin):
     # Wrapper for track()
     #
     def _track(self):
-        new_ref = self.track()
+        new_ref = self._mirrored_track()
         current_ref = self.get_ref()
 
         if new_ref is None:
@@ -594,6 +663,32 @@ class Source(Plugin):
 
         return new_ref
 
+    # _mirrored_track():
+    #
+    # Tries to track from every mirror, stopping once it succeeds
+    #
+    # Returns:
+    #    (simple object): A new internal source reference, or None
+    def _mirrored_track(self):
+
+        project = self._get_project()
+
+        # Use alias overrides to try and get the list of source downloaders
+        # Because some sources (git) need to be able to fetch to get the
+        # source downloaders
+        alias = self.get_alias()
+        uri_list = reversed(project.get_alias_uris(alias))
+        downloaders = self.__iterate_uris(uri_list, self.get_source_downloaders,
+                                          "get source downloaders when tracking")
+
+        # We only track for the main downloader
+        downloader = downloaders[0]
+
+        # If there are no mirrors or alias, track without overrides.
+        alias = downloader.get_alias()
+        uri_list = reversed(project.get_alias_uris(alias))
+        return self.__iterate_uris(uri_list, downloader.track, "track")
+
     #############################################################
     #                   Local Private Methods                   #
     #############################################################
@@ -630,3 +725,24 @@ class Source(Plugin):
         _yaml.node_final_assertions(config)
 
         return config
+
+    # This will catch SourceErrors and interpret them as a reason to try
+    # the next one
+    #
+    def __iterate_uris(self, uri_list, callback, task_description):
+        errors = []
+        success = False
+        for uri in uri_list:
+            try:
+                retval = callback(alias_override=uri)
+            except SourceError as e:
+                errors.append(e); continue
+            success = True
+            break
+        if not success:
+            if errors:
+                detail = "Errors collected:\n" + "\n".join([str(e) for e in errors])
+            else:
+                detail = None
+            raise SourceError("{}: Failed to {}".format(self, task_description), detail=detail)
+        return retval


[buildstream] 07/16: testutils: Make tar and zip sources copy successfully

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 84911991a653612c43c3b886468e99e7884b3c90
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Mon Jun 11 12:46:19 2018 +0100

    testutils: Make tar and zip sources copy successfully
    
    Unlike other sources, the directory that holds files is not the source.
    The file within that directory is the source.
    Removing subdir by default prevents extraneous "repo" subdirs being
    created.
---
 tests/testutils/repo/tar.py | 4 ++++
 tests/testutils/repo/zip.py | 4 ++++
 2 files changed, 8 insertions(+)

diff --git a/tests/testutils/repo/tar.py b/tests/testutils/repo/tar.py
index ee6cb77..9882d6b 100644
--- a/tests/testutils/repo/tar.py
+++ b/tests/testutils/repo/tar.py
@@ -8,6 +8,10 @@ from .repo import Repo
 
 class Tar(Repo):
 
+    def __init__(self, directory, subdir='repo'):
+        # the repo is the file, not the dir it's in, so subdir is removed.
+        super().__init__(directory, '')
+
     def create(self, directory):
         tarball = os.path.join(self.repo, 'file.tar.gz')
 
diff --git a/tests/testutils/repo/zip.py b/tests/testutils/repo/zip.py
index 32a4698..bcacaf3 100644
--- a/tests/testutils/repo/zip.py
+++ b/tests/testutils/repo/zip.py
@@ -8,6 +8,10 @@ from .repo import Repo
 
 class Zip(Repo):
 
+    def __init__(self, directory, subdir='repo'):
+        # the repo is the file, not the dir it's in, so subdir is removed.
+        super().__init__(directory, '')
+
     def create(self, directory):
         archive = os.path.join(self.repo, 'file.zip')
 


[buildstream] 08/16: tests: Add mirrored fetching and tracking tests

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 8c65bf67a6d55cfef17971d26ed7ee6d15dbbff3
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Wed Apr 11 17:06:07 2018 +0100

    tests: Add mirrored fetching and tracking tests
---
 .../consistencyerror/plugins/consistencybug.py     |   3 +
 .../consistencyerror/plugins/consistencyerror.py   |   3 +
 tests/frontend/mirror.py                           | 385 +++++++++++++++++++++
 tests/frontend/project/sources/fetch_source.py     |  83 +++++
 4 files changed, 474 insertions(+)

diff --git a/tests/frontend/consistencyerror/plugins/consistencybug.py b/tests/frontend/consistencyerror/plugins/consistencybug.py
index 39eeaa8..dd9bb3c 100644
--- a/tests/frontend/consistencyerror/plugins/consistencybug.py
+++ b/tests/frontend/consistencyerror/plugins/consistencybug.py
@@ -29,6 +29,9 @@ class ConsistencyBugSource(Source):
     def stage(self, directory):
         pass
 
+    def get_alias(self):
+        pass
+
 
 def setup():
     return ConsistencyBugSource
diff --git a/tests/frontend/consistencyerror/plugins/consistencyerror.py b/tests/frontend/consistencyerror/plugins/consistencyerror.py
index 381e9e8..4777368 100644
--- a/tests/frontend/consistencyerror/plugins/consistencyerror.py
+++ b/tests/frontend/consistencyerror/plugins/consistencyerror.py
@@ -30,6 +30,9 @@ class ConsistencyErrorSource(Source):
     def stage(self, directory):
         pass
 
+    def get_alias(self):
+        pass
+
 
 def setup():
     return ConsistencyErrorSource
diff --git a/tests/frontend/mirror.py b/tests/frontend/mirror.py
new file mode 100644
index 0000000..c9ef295
--- /dev/null
+++ b/tests/frontend/mirror.py
@@ -0,0 +1,385 @@
+import os
+import pytest
+
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+
+from buildstream import _yaml
+
+
+# Project directory
+TOP_DIR = os.path.dirname(os.path.realpath(__file__))
+DATA_DIR = os.path.join(TOP_DIR, 'project')
+
+
+def generate_element(output_file):
+    element = {
+        'kind': 'import',
+        'sources': [
+            {
+                'kind': 'fetch_source',
+                "output-text": output_file,
+                "urls": ["foo:repo1", "bar:repo2"],
+                "fetch-succeeds": {
+                    "FOO/repo1": True,
+                    "BAR/repo2": False,
+                    "OOF/repo1": False,
+                    "RAB/repo2": True,
+                    "OFO/repo1": False,
+                    "RBA/repo2": False,
+                    "ooF/repo1": False,
+                    "raB/repo2": False,
+                }
+            }
+        ]
+    }
+    return element
+
+
+def generate_project():
+    project = {
+        'name': 'test',
+        'element-path': 'elements',
+        'aliases': {
+            'foo': 'FOO/',
+            'bar': 'BAR/',
+        },
+        'mirrors': [
+            {
+                'location-name': 'middle-earth',
+                'aliases': {
+                    'foo': ['OOF/'],
+                    'bar': ['RAB/'],
+                },
+            },
+            {
+                'location-name': 'arrakis',
+                'aliases': {
+                    'foo': ['OFO/'],
+                    'bar': ['RBA/'],
+                },
+            },
+            {
+                'location-name': 'oz',
+                'aliases': {
+                    'foo': ['ooF/'],
+                    'bar': ['raB/'],
+                }
+            },
+        ],
+        'plugins': [
+            {
+                'origin': 'local',
+                'path': 'sources',
+                'sources': {
+                    'fetch_source': 0
+                }
+            }
+        ]
+    }
+    return project
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_fetch(cli, tmpdir, datafiles, kind):
+    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+    project_dir = os.path.join(str(tmpdir), 'project')
+    os.makedirs(project_dir)
+    element_dir = os.path.join(project_dir, 'elements')
+
+    # Create repo objects of the upstream and mirror
+    upstream_repo = create_repo(kind, upstream_repodir)
+    upstream_ref = upstream_repo.create(bin_files_path)
+    mirror_repo = upstream_repo.copy(mirror_repodir)
+    mirror_ref = upstream_ref
+    upstream_ref = upstream_repo.create(dev_files_path)
+
+    element = {
+        'kind': 'import',
+        'sources': [
+            upstream_repo.source_config(ref=upstream_ref)
+        ]
+    }
+    element_name = 'test.bst'
+    element_path = os.path.join(element_dir, element_name)
+    full_repo = element['sources'][0]['url']
+    upstream_map, repo_name = os.path.split(full_repo)
+    alias = 'foo-' + kind
+    aliased_repo = alias + ':' + repo_name
+    element['sources'][0]['url'] = aliased_repo
+    mirror_map, _ = os.path.split(mirror_repo.repo)
+    os.makedirs(element_dir)
+    _yaml.dump(element, element_path)
+
+    project = {
+        'name': 'test',
+        'element-path': 'elements',
+        'aliases': {
+            alias: upstream_map + "/"
+        },
+        'mirrors': [
+            {
+                'location-name': 'middle-earth',
+                'aliases': {
+                    alias: ["file://" + mirror_map + "/"],
+                },
+            },
+        ]
+    }
+    project_file = os.path.join(project_dir, 'project.conf')
+    _yaml.dump(project, project_file)
+
+    # No obvious ways of checking that the mirror has been fetched
+    # But at least we can be sure it succeeds
+    result = cli.run(project=project_dir, args=['fetch', element_name])
+    result.assert_success()
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_multi(cli, tmpdir, datafiles):
+    output_file = os.path.join(str(tmpdir), "output.txt")
+    project_dir = str(tmpdir)
+    element_dir = os.path.join(project_dir, 'elements')
+    os.makedirs(element_dir, exist_ok=True)
+    element_name = "test.bst"
+    element_path = os.path.join(element_dir, element_name)
+    element = generate_element(output_file)
+    _yaml.dump(element, element_path)
+
+    project_file = os.path.join(project_dir, 'project.conf')
+    project = generate_project()
+    _yaml.dump(project, project_file)
+
+    result = cli.run(project=project_dir, args=['fetch', element_name])
+    result.assert_success()
+    with open(output_file) as f:
+        contents = f.read()
+        assert "Fetch foo:repo1 succeeded from FOO/repo1" in contents
+        assert "Fetch bar:repo2 succeeded from RAB/repo2" in contents
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_default_cmdline(cli, tmpdir, datafiles):
+    output_file = os.path.join(str(tmpdir), "output.txt")
+    project_dir = str(tmpdir)
+    element_dir = os.path.join(project_dir, 'elements')
+    os.makedirs(element_dir, exist_ok=True)
+    element_name = "test.bst"
+    element_path = os.path.join(element_dir, element_name)
+    element = generate_element(output_file)
+    _yaml.dump(element, element_path)
+
+    project_file = os.path.join(project_dir, 'project.conf')
+    project = generate_project()
+    _yaml.dump(project, project_file)
+
+    result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
+    result.assert_success()
+    with open(output_file) as f:
+        contents = f.read()
+        print(contents)
+        # Success if fetching from arrakis' mirror happened before middle-earth's
+        arrakis_str = "OFO/repo1"
+        arrakis_pos = contents.find(arrakis_str)
+        assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
+        me_str = "OOF/repo1"
+        me_pos = contents.find(me_str)
+        assert me_pos != -1, "'{}' wasn't found".format(me_str)
+        assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_default_userconfig(cli, tmpdir, datafiles):
+    output_file = os.path.join(str(tmpdir), "output.txt")
+    project_dir = str(tmpdir)
+    element_dir = os.path.join(project_dir, 'elements')
+    os.makedirs(element_dir, exist_ok=True)
+    element_name = "test.bst"
+    element_path = os.path.join(element_dir, element_name)
+    element = generate_element(output_file)
+    _yaml.dump(element, element_path)
+
+    project_file = os.path.join(project_dir, 'project.conf')
+    project = generate_project()
+    _yaml.dump(project, project_file)
+
+    cli.configure({'default-mirror': 'oz'})
+
+    result = cli.run(project=project_dir, args=['fetch', element_name])
+    result.assert_success()
+    with open(output_file) as f:
+        contents = f.read()
+        print(contents)
+        # Success if fetching from Oz' mirror happened before middle-earth's
+        oz_str = "ooF/repo1"
+        oz_pos = contents.find(oz_str)
+        assert oz_pos != -1, "'{}' wasn't found".format(oz_str)
+        me_str = "OOF/repo1"
+        me_pos = contents.find(me_str)
+        assert me_pos != -1, "'{}' wasn't found".format(me_str)
+        assert oz_pos < me_pos, "'{}' wasn't found before '{}'".format(oz_str, me_str)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_mirror_fetch_default_cmdline_overrides_config(cli, tmpdir, datafiles):
+    output_file = os.path.join(str(tmpdir), "output.txt")
+    project_dir = str(tmpdir)
+    element_dir = os.path.join(project_dir, 'elements')
+    os.makedirs(element_dir, exist_ok=True)
+    element_name = "test.bst"
+    element_path = os.path.join(element_dir, element_name)
+    element = generate_element(output_file)
+    _yaml.dump(element, element_path)
+
+    project_file = os.path.join(project_dir, 'project.conf')
+    project = generate_project()
+    _yaml.dump(project, project_file)
+
+    cli.configure({'default-mirror': 'oz'})
+
+    result = cli.run(project=project_dir, args=['--default-mirror', 'arrakis', 'fetch', element_name])
+    result.assert_success()
+    with open(output_file) as f:
+        contents = f.read()
+        print(contents)
+        # Success if fetching from arrakis' mirror happened before middle-earth's
+        arrakis_str = "OFO/repo1"
+        arrakis_pos = contents.find(arrakis_str)
+        assert arrakis_pos != -1, "'{}' wasn't found".format(arrakis_str)
+        me_str = "OOF/repo1"
+        me_pos = contents.find(me_str)
+        assert me_pos != -1, "'{}' wasn't found".format(me_str)
+        assert arrakis_pos < me_pos, "'{}' wasn't found before '{}'".format(arrakis_str, me_str)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_present(cli, tmpdir, datafiles, kind):
+    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+    project_dir = os.path.join(str(tmpdir), 'project')
+    os.makedirs(project_dir)
+    element_dir = os.path.join(project_dir, 'elements')
+
+    # Create repo objects of the upstream and mirror
+    upstream_repo = create_repo(kind, upstream_repodir)
+    upstream_ref = upstream_repo.create(bin_files_path)
+    mirror_repo = upstream_repo.copy(mirror_repodir)
+    mirror_ref = upstream_ref
+    upstream_ref = upstream_repo.create(dev_files_path)
+
+    element = {
+        'kind': 'import',
+        'sources': [
+            upstream_repo.source_config(ref=upstream_ref)
+        ]
+    }
+
+    element['sources'][0]
+    element_name = 'test.bst'
+    element_path = os.path.join(element_dir, element_name)
+    full_repo = element['sources'][0]['url']
+    upstream_map, repo_name = os.path.split(full_repo)
+    alias = 'foo-' + kind
+    aliased_repo = alias + ':' + repo_name
+    element['sources'][0]['url'] = aliased_repo
+    mirror_map, _ = os.path.split(mirror_repo.repo)
+    os.makedirs(element_dir)
+    _yaml.dump(element, element_path)
+
+    project = {
+        'name': 'test',
+        'element-path': 'elements',
+        'aliases': {
+            alias: upstream_map + "/"
+        },
+        'mirrors': [
+            {
+                'location-name': 'middle-earth',
+                'aliases': {
+                    alias: ["file://" + mirror_map + "/"],
+                },
+            },
+        ]
+    }
+    project_file = os.path.join(project_dir, 'project.conf')
+    _yaml.dump(project, project_file)
+
+    result = cli.run(project=project_dir, args=['track', element_name])
+    result.assert_success()
+
+    # Tracking tries upstream first. Check the ref is from upstream.
+    new_element = _yaml.load(element_path)
+    source = new_element['sources'][0]
+    if 'ref' in source:
+        assert source['ref'] == upstream_ref
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
+    bin_files_path = os.path.join(str(datafiles), 'files', 'bin-files', 'usr')
+    dev_files_path = os.path.join(str(datafiles), 'files', 'dev-files', 'usr')
+    upstream_repodir = os.path.join(str(tmpdir), 'upstream')
+    mirror_repodir = os.path.join(str(tmpdir), 'mirror')
+    project_dir = os.path.join(str(tmpdir), 'project')
+    os.makedirs(project_dir)
+    element_dir = os.path.join(project_dir, 'elements')
+
+    # Create repo objects of the upstream and mirror
+    upstream_repo = create_repo(kind, upstream_repodir)
+    upstream_ref = upstream_repo.create(bin_files_path)
+    mirror_repo = upstream_repo.copy(mirror_repodir)
+    mirror_ref = upstream_ref
+    upstream_ref = upstream_repo.create(dev_files_path)
+
+    element = {
+        'kind': 'import',
+        'sources': [
+            upstream_repo.source_config(ref=upstream_ref)
+        ]
+    }
+
+    element['sources'][0]
+    element_name = 'test.bst'
+    element_path = os.path.join(element_dir, element_name)
+    full_repo = element['sources'][0]['url']
+    upstream_map, repo_name = os.path.split(full_repo)
+    alias = 'foo-' + kind
+    aliased_repo = alias + ':' + repo_name
+    element['sources'][0]['url'] = aliased_repo
+    mirror_map, _ = os.path.split(mirror_repo.repo)
+    os.makedirs(element_dir)
+    _yaml.dump(element, element_path)
+
+    project = {
+        'name': 'test',
+        'element-path': 'elements',
+        'aliases': {
+            alias: 'http://www.example.com/'
+        },
+        'mirrors': [
+            {
+                'location-name': 'middle-earth',
+                'aliases': {
+                    alias: ["file://" + mirror_map + "/"],
+                },
+            },
+        ]
+    }
+    project_file = os.path.join(project_dir, 'project.conf')
+    _yaml.dump(project, project_file)
+
+    result = cli.run(project=project_dir, args=['track', element_name])
+    result.assert_success()
+
+    # Check that tracking fell back to the mirror
+    new_element = _yaml.load(element_path)
+    source = new_element['sources'][0]
+    if 'ref' in source:
+        assert source['ref'] == mirror_ref
diff --git a/tests/frontend/project/sources/fetch_source.py b/tests/frontend/project/sources/fetch_source.py
new file mode 100644
index 0000000..28ee6a6
--- /dev/null
+++ b/tests/frontend/project/sources/fetch_source.py
@@ -0,0 +1,83 @@
+import os
+import sys
+
+from buildstream import Source, Consistency, SourceError, SourceDownloader
+
+# Expected config
+# sources:
+# - output-text: $FILE
+#   urls:
+#   - foo:bar
+#   - baz:quux
+#   fetch-succeeds:
+#     Foo/bar: true
+#     ooF/bar: false
+
+
+class FetchDownloader(SourceDownloader):
+    def __init__(self, source, url):
+        self.source = source
+        self.original_url = url
+
+    def fetch(self, alias_override=None):
+        url = self.source.translate_url(self.original_url, alias_override)
+        with open(self.source.output_file, "a") as f:
+            success = url in self.source.fetch_succeeds and self.source.fetch_succeeds[url]
+            message = "Fetch {} {} from {}\n".format(self.original_url,
+                                                     "succeeded" if success else "failed",
+                                                     url)
+            f.write(message)
+            if not success:
+                raise SourceError("Failed to fetch {}".format(url))
+
+
+class FetchSource(Source):
+    # Read config to know which URLs to fetch
+    def configure(self, node):
+        self.original_urls = self.node_get_member(node, list, 'urls')
+        self.downloaders = [FetchDownloader(self, url) for url in self.original_urls]
+
+        self.output_file = self.node_get_member(node, str, 'output-text')
+        self.fetch_succeeds = {}
+        if 'fetch-succeeds' in node:
+            self.fetch_succeeds = {x[0]: x[1] for x in self.node_items(node['fetch-succeeds'])}
+
+    def get_source_downloaders(self, alias_override=None):
+        return self.downloaders
+
+    def preflight(self):
+        output_dir = os.path.dirname(self.output_file)
+        if not os.path.exists(output_dir):
+            raise SourceError("Directory '{}' does not exist".format(output_dir))
+
+    def get_unique_key(self):
+        return {"urls": self.original_urls, "output_file": self.output_file}
+
+    def get_consistency(self):
+        if not os.path.exists(self.output_file):
+            return Consistency.RESOLVED
+
+        with open(self.output_file, "r") as f:
+            contents = f.read()
+            for url in self.original_urls:
+                if url not in contents:
+                    return Consistency.RESOLVED
+
+        return Consistency.CACHED
+
+    # We dont have a ref, we're a local file...
+    def load_ref(self, node):
+        pass
+
+    def get_ref(self):
+        return None  # pragma: nocover
+
+    def set_ref(self, ref, node):
+        pass  # pragma: nocover
+
+    def get_alias(self):
+        return None
+
+
+def setup():
+    return FetchSource


[buildstream] 10/16: _downloadablefilesource.py: Update to use alias overrides in fetch and track

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit e971f6404cf14065354f8b1bbd7523661e6d52be
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 17:01:32 2018 +0100

    _downloadablefilesource.py: Update to use alias overrides in fetch and track
---
 .../plugins/sources/_downloadablefilesource.py     | 29 +++++++++++-----------
 1 file changed, 15 insertions(+), 14 deletions(-)

diff --git a/buildstream/plugins/sources/_downloadablefilesource.py b/buildstream/plugins/sources/_downloadablefilesource.py
index ec9c0fb..44862a9 100644
--- a/buildstream/plugins/sources/_downloadablefilesource.py
+++ b/buildstream/plugins/sources/_downloadablefilesource.py
@@ -18,7 +18,6 @@ class DownloadableFileSource(Source):
     def configure(self, node):
         self.original_url = self.node_get_member(node, str, 'url')
         self.ref = self.node_get_member(node, str, 'ref', None)
-        self.url = self.translate_url(self.original_url)
         self._warn_deprecated_etag(node)
 
     def preflight(self):
@@ -47,24 +46,26 @@ class DownloadableFileSource(Source):
     def set_ref(self, ref, node):
         node['ref'] = self.ref = ref
 
-    def track(self):
+    def track(self, alias_override=None):
         # there is no 'track' field in the source to determine what/whether
         # or not to update refs, because tracking a ref is always a conscious
         # decision by the user.
-        with self.timed_activity("Tracking {}".format(self.url),
+        url = self.translate_url(self.original_url, alias_override=alias_override)
+        with self.timed_activity("Tracking {}".format(url),
                                  silent_nested=True):
-            new_ref = self._ensure_mirror()
+            new_ref = self._ensure_mirror(url)
 
             if self.ref and self.ref != new_ref:
                 detail = "When tracking, new ref differs from current ref:\n" \
-                    + "  Tracked URL: {}\n".format(self.url) \
+                    + "  Tracked URL: {}\n".format(url) \
                     + "  Current ref: {}\n".format(self.ref) \
                     + "  New ref: {}\n".format(new_ref)
                 self.warn("Potential man-in-the-middle attack!", detail=detail)
 
             return new_ref
 
-    def fetch(self):
+    def fetch(self, alias_override=None):
+        url = self.translate_url(self.original_url, alias_override=alias_override)
 
         # Just a defensive check, it is impossible for the
         # file to be already cached because Source.fetch() will
@@ -75,11 +76,11 @@ class DownloadableFileSource(Source):
 
         # Download the file, raise hell if the sha256sums don't match,
         # and mirror the file otherwise.
-        with self.timed_activity("Fetching {}".format(self.url), silent_nested=True):
-            sha256 = self._ensure_mirror()
+        with self.timed_activity("Fetching {}".format(url), silent_nested=True):
+            sha256 = self._ensure_mirror(url)
             if sha256 != self.ref:
                 raise SourceError("File downloaded from {} has sha256sum '{}', not '{}'!"
-                                  .format(self.url, sha256, self.ref))
+                                  .format(url, sha256, self.ref))
 
     def _warn_deprecated_etag(self, node):
         etag = self.node_get_member(node, str, 'etag', None)
@@ -100,12 +101,12 @@ class DownloadableFileSource(Source):
         with utils.save_file_atomic(etagfilename) as etagfile:
             etagfile.write(etag)
 
-    def _ensure_mirror(self):
+    def _ensure_mirror(self, url):
         # Downloads from the url and caches it according to its sha256sum.
         try:
             with self.tempdir() as td:
-                default_name = os.path.basename(self.url)
-                request = urllib.request.Request(self.url)
+                default_name = os.path.basename(url)
+                request = urllib.request.Request(url)
                 request.add_header('Accept', '*/*')
 
                 # We do not use etag in case what we have in cache is
@@ -150,11 +151,11 @@ class DownloadableFileSource(Source):
                 # we would have downloaded.
                 return self.ref
             raise SourceError("{}: Error mirroring {}: {}"
-                              .format(self, self.url, e)) from e
+                              .format(self, url, e)) from e
 
         except (urllib.error.URLError, urllib.error.ContentTooShortError, OSError) as e:
             raise SourceError("{}: Error mirroring {}: {}"
-                              .format(self, self.url, e)) from e
+                              .format(self, url, e)) from e
 
     def _get_mirror_dir(self):
         return os.path.join(self.get_mirror_directory(),


[buildstream] 16/16: patch.py: Fix use of alias overrides

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 6696a2e5353b24b085ae072f14ca29ae2bafac18
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 17:41:52 2018 +0100

    patch.py: Fix use of alias overrides
---
 buildstream/plugins/sources/patch.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/buildstream/plugins/sources/patch.py b/buildstream/plugins/sources/patch.py
index 88fb2d5..8beaf4f 100644
--- a/buildstream/plugins/sources/patch.py
+++ b/buildstream/plugins/sources/patch.py
@@ -80,7 +80,7 @@ class PatchSource(Source):
     def set_ref(self, ref, node):
         pass  # pragma: nocover
 
-    def fetch(self):
+    def fetch(self, alias_override=None):
         # Nothing to do here for a local source
         pass  # pragma: nocover
 


[buildstream] 03/16: Allow translate_url to be overridden with a different alias in Project and Source

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit d02bfe075c89fcc8a0e197888283a84193fecc55
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 16:51:14 2018 +0100

    Allow translate_url to be overridden with a different alias in Project
    and Source
---
 buildstream/_project.py | 8 ++++++--
 buildstream/source.py   | 4 ++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/buildstream/_project.py b/buildstream/_project.py
index 110aeec..98be8fb 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -133,10 +133,14 @@ class Project():
     # This method is provided for :class:`.Source` objects to resolve
     # fully qualified urls based on the shorthand which is allowed
     # to be specified in the YAML
-    def translate_url(self, url):
+    def translate_url(self, url, alias_override=None):
         if url and utils._ALIAS_SEPARATOR in url:
             url_alias, url_body = url.split(utils._ALIAS_SEPARATOR, 1)
-            alias_url = self._aliases.get(url_alias)
+            if alias_override:
+                alias_url = alias_override
+            else:
+                alias_url = self._aliases.get(url_alias)
+
             if alias_url:
                 url = alias_url + url_body
 
diff --git a/buildstream/source.py b/buildstream/source.py
index 470a407..ee0903d 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -300,7 +300,7 @@ class Source(Plugin):
         os.makedirs(directory, exist_ok=True)
         return directory
 
-    def translate_url(self, url):
+    def translate_url(self, url, alias_override=None):
         """Translates the given url which may be specified with an alias
         into a fully qualified url.
 
@@ -311,7 +311,7 @@ class Source(Plugin):
            str: The fully qualified url, with aliases resolved
         """
         project = self._get_project()
-        return project.translate_url(url)
+        return project.translate_url(url, alias_override)
 
     def get_project_directory(self):
         """Fetch the project base directory


[buildstream] 01/16: project: Parse and store mirrors with default mirror set via config or command-line

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 105e10200140629203b76a34fbb743e823bf8b74
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Mon Apr 9 15:48:50 2018 +0100

    project: Parse and store mirrors with default mirror set via config or
    command-line
    
    In user config (buildstream.conf), it is set with the "default-mirror"
    field.
    
    On the command-line, it is set with "--default-mirror"
---
 buildstream/_context.py          |  6 ++++++
 buildstream/_frontend/app.py     |  3 ++-
 buildstream/_frontend/cli.py     |  2 ++
 buildstream/_project.py          | 41 +++++++++++++++++++++++++++++++++++++---
 tests/completions/completions.py |  1 +
 5 files changed, 49 insertions(+), 4 deletions(-)

diff --git a/buildstream/_context.py b/buildstream/_context.py
index 114ac9e..bd9e458 100644
--- a/buildstream/_context.py
+++ b/buildstream/_context.py
@@ -102,6 +102,9 @@ class Context():
         # What to do when a build fails in non interactive mode
         self.sched_error_action = 'continue'
 
+        # The default mirror to fetch from
+        self.default_mirror = None
+
         # Whether elements must be rebuilt when their dependencies have changed
         self._strict_build_plan = None
 
@@ -154,6 +157,7 @@ class Context():
         _yaml.node_validate(defaults, [
             'sourcedir', 'builddir', 'artifactdir', 'logdir',
             'scheduler', 'artifacts', 'logging', 'projects',
+            'default-mirror',
         ])
 
         for directory in ['sourcedir', 'builddir', 'artifactdir', 'logdir']:
@@ -199,6 +203,8 @@ class Context():
         # Load per-projects overrides
         self._project_overrides = _yaml.node_get(defaults, Mapping, 'projects', default_value={})
 
+        self.default_mirror = _yaml.node_get(defaults, str, "default-mirror", default_value=None)
+
         # Shallow validation of overrides, parts of buildstream which rely
         # on the overrides are expected to validate elsewhere.
         for _, overrides in _yaml.node_items(self._project_overrides):
diff --git a/buildstream/_frontend/app.py b/buildstream/_frontend/app.py
index d30b592..8f6dea7 100644
--- a/buildstream/_frontend/app.py
+++ b/buildstream/_frontend/app.py
@@ -203,7 +203,8 @@ class App():
         # Load the Project
         #
         try:
-            self.project = Project(directory, self.context, cli_options=self._main_options['option'])
+            self.project = Project(directory, self.context, cli_options=self._main_options['option'],
+                                   default_mirror=self._main_options.get('default_mirror'))
         except LoadError as e:
 
             # Let's automatically start a `bst init` session in this case
diff --git a/buildstream/_frontend/cli.py b/buildstream/_frontend/cli.py
index 4651245..b52053c 100644
--- a/buildstream/_frontend/cli.py
+++ b/buildstream/_frontend/cli.py
@@ -217,6 +217,8 @@ def print_version(ctx, param, value):
               help="Elements must be rebuilt when their dependencies have changed")
 @click.option('--option', '-o', type=click.Tuple([str, str]), multiple=True, metavar='OPTION VALUE',
               help="Specify a project option")
+@click.option('--default-mirror', default=None,
+              help="The mirror to fetch from first, before attempting other mirrors")
 @click.pass_context
 def cli(context, **kwargs):
     """Build and manipulate BuildStream projects
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 9f42bf6..188bf27 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -20,7 +20,7 @@
 
 import os
 import multiprocessing  # for cpu_count()
-from collections import Mapping
+from collections import Mapping, OrderedDict
 from pluginbase import PluginBase
 from . import utils
 from . import _cachekey
@@ -71,7 +71,7 @@ class HostMount():
 #
 class Project():
 
-    def __init__(self, directory, context, *, junction=None, cli_options=None):
+    def __init__(self, directory, context, *, junction=None, cli_options=None, default_mirror=None):
 
         # The project name
         self.name = None
@@ -95,6 +95,9 @@ class Project():
         self.base_env_nocache = None             # The base nocache mask (list) for the environment
         self.element_overrides = {}              # Element specific configurations
         self.source_overrides = {}               # Source specific configurations
+        self.mirrors = OrderedDict()             # contains dicts of alias-mappings to URIs.
+
+        self.default_mirror = default_mirror or context.default_mirror  # The name of the preferred mirror.
 
         #
         # Private Members
@@ -203,6 +206,23 @@ class Project():
         self._assert_plugin_format(source, version)
         return source
 
+    # get_alias_uris()
+    #
+    # Yields every URI to replace a given alias with
+    def get_alias_uris(self, alias):
+        if not alias or alias not in self._aliases:
+            return [None]
+
+        mirror_list = []
+        for key, alias_mapping in self.mirrors.items():
+            if alias in alias_mapping:
+                if key == self.default_mirror:
+                    mirror_list = alias_mapping[alias] + mirror_list
+                else:
+                    mirror_list += alias_mapping[alias]
+        mirror_list.append(self._aliases[alias])
+        return mirror_list
+
     # _load():
     #
     # Loads the project configuration file in the project directory.
@@ -250,7 +270,7 @@ class Project():
             'aliases', 'name',
             'artifacts', 'options',
             'fail-on-overlap', 'shell',
-            'ref-storage', 'sandbox'
+            'ref-storage', 'sandbox', 'mirrors',
         ])
 
         # The project name, element path and option declarations
@@ -415,6 +435,21 @@ class Project():
 
             self._shell_host_files.append(mount)
 
+        mirrors = _yaml.node_get(config, list, 'mirrors', default_value=[])
+        for mirror in mirrors:
+            allowed_mirror_fields = [
+                'location-name', 'aliases'
+            ]
+            _yaml.node_validate(mirror, allowed_mirror_fields)
+            mirror_location = _yaml.node_get(mirror, str, 'location-name')
+            alias_mappings = {}
+            for alias_mapping, uris in _yaml.node_items(mirror['aliases']):
+                assert isinstance(uris, list)
+                alias_mappings[alias_mapping] = list(uris)
+            self.mirrors[mirror_location] = alias_mappings
+            if not self.default_mirror:
+                self.default_mirror = mirror_location
+
     # _assert_plugin_format()
     #
     # Helper to raise a PluginError if the loaded plugin is of a lesser version then
diff --git a/tests/completions/completions.py b/tests/completions/completions.py
index 7c169c2..1ff026e 100644
--- a/tests/completions/completions.py
+++ b/tests/completions/completions.py
@@ -27,6 +27,7 @@ MAIN_OPTIONS = [
     "--colors ",
     "--config ",
     "--debug ",
+    "--default-mirror ",
     "--directory ",
     "--error-lines ",
     "--fetchers ",


[buildstream] 13/16: tar.py: Update to handle alias overrides

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit afa5c034e0caf046ed9104a6ad44d704331e81c8
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Fri Jun 22 17:37:39 2018 +0100

    tar.py: Update to handle alias overrides
    
    i.e. self.url was done away with
---
 buildstream/plugins/sources/tar.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/buildstream/plugins/sources/tar.py b/buildstream/plugins/sources/tar.py
index 324006b..d19c9c7 100644
--- a/buildstream/plugins/sources/tar.py
+++ b/buildstream/plugins/sources/tar.py
@@ -75,7 +75,7 @@ class TarSource(DownloadableFileSource):
 
     def preflight(self):
         self.host_lzip = None
-        if self.url.endswith('.lz'):
+        if self.original_url.endswith('.lz'):
             self.host_lzip = utils.get_host_tool('lzip')
 
     def get_unique_key(self):
@@ -96,7 +96,7 @@ class TarSource(DownloadableFileSource):
 
     @contextmanager
     def _get_tar(self):
-        if self.url.endswith('.lz'):
+        if self.original_url.endswith('.lz'):
             with self._run_lzip() as lzip_dec:
                 with tarfile.open(fileobj=lzip_dec, mode='r:') as tar:
                     yield tar


[buildstream] 12/16: git.py: Use SourceDownloaders and alias_overrides

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jonathan/mirror-client-sourcedownloader-tidy
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 772174e5ef9644665626aa8605a2003512973624
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Thu Apr 12 16:28:12 2018 +0100

    git.py: Use SourceDownloaders and alias_overrides
    
    Each GitMirror is now a SourceDownloader
---
 buildstream/plugins/sources/git.py | 123 ++++++++++++++++++-------------------
 1 file changed, 59 insertions(+), 64 deletions(-)

diff --git a/buildstream/plugins/sources/git.py b/buildstream/plugins/sources/git.py
index 44065ad..5fdaba5 100644
--- a/buildstream/plugins/sources/git.py
+++ b/buildstream/plugins/sources/git.py
@@ -75,7 +75,7 @@ from io import StringIO
 
 from configparser import RawConfigParser
 
-from buildstream import Source, SourceError, Consistency
+from buildstream import Source, SourceError, Consistency, SourceDownloader
 from buildstream import utils
 
 GIT_MODULES = '.gitmodules'
@@ -85,18 +85,19 @@ GIT_MODULES = '.gitmodules'
 # for the primary git source and also for each submodule it
 # might have at a given time
 #
-class GitMirror():
+class GitMirror(SourceDownloader):
 
     def __init__(self, source, path, url, ref):
 
         self.source = source
         self.path = path
-        self.url = source.translate_url(url)
+        self.original_url = url
         self.ref = ref
-        self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(self.url))
+        self.mirror = os.path.join(source.get_mirror_directory(), utils.url_directory_name(url))
 
     # Ensures that the mirror exists
-    def ensure(self):
+    def ensure(self, alias_override=None):
+        url = self.source.translate_url(self.original_url, alias_override=alias_override)
 
         # Unfortunately, git does not know how to only clone just a specific ref,
         # so we have to download all of those gigs even if we only need a couple
@@ -109,19 +110,47 @@ class GitMirror():
             # system configured tmpdir is not on the same partition.
             #
             with self.source.tempdir() as tmpdir:
-                self.source.call([self.source.host_git, 'clone', '--mirror', '-n', self.url, tmpdir],
-                                 fail="Failed to clone git repository {}".format(self.url))
+                self.source.call([self.source.host_git, 'clone', '--mirror', '-n', url, tmpdir],
+                                 fail="Failed to clone git repository {}".format(url))
 
                 try:
                     shutil.move(tmpdir, self.mirror)
                 except (shutil.Error, OSError) as e:
                     raise SourceError("{}: Failed to move cloned git repository {} from '{}' to '{}'"
-                                      .format(self.source, self.url, tmpdir, self.mirror)) from e
+                                      .format(self.source, url, tmpdir, self.mirror)) from e
+
+    def fetch(self, alias_override=None):
+        url = self.source.translate_url(self.original_url, alias_override=alias_override)
+        with self.source.timed_activity("Fetching {}".format(url), silent_nested=True):
+            self.ensure(alias_override)
+            # NOTE: We fetch from the local source cache if possible, potentially causing
+            #       unexpected results when fetching from mirrors
+            if not self.has_ref():
+                self.source.call([self.source.host_git, 'fetch', url, '--prune'],
+                                 fail="Failed to fetch from remote git repository: {}".format(url),
+                                 cwd=self.mirror)
+            self.assert_ref()
+
+    def track(self, alias_override=None):
+        url = self.source.translate_url(self.original_url, alias_override=alias_override)
 
-    def fetch(self):
-        self.source.call([self.source.host_git, 'fetch', 'origin', '--prune'],
-                         fail="Failed to fetch from remote git repository: {}".format(self.url),
-                         cwd=self.mirror)
+        # If self.tracking is not specified it's not an error, just silently return
+        if not self.source.tracking:
+            return None
+
+        with self.source.timed_activity("Tracking {} from {}"
+                                        .format(self.source.tracking, url),
+                                        silent_nested=True):
+            self.ensure(alias_override=alias_override)
+            if not self.has_ref():
+                self.source.call([self.source.host_git, 'fetch', url, '--prune'],
+                                 fail="Failed to fetch from remote git repository: {}".format(url),
+                                 cwd=self.mirror)
+
+            # Update self.ref and node.ref from the self.tracking branch
+            ret = self.latest_commit(self.source.tracking)
+
+        return ret
 
     def has_ref(self):
         if not self.ref:
@@ -138,7 +167,7 @@ class GitMirror():
     def assert_ref(self):
         if not self.has_ref():
             raise SourceError("{}: expected ref '{}' was not found in git repository: '{}'"
-                              .format(self.source, self.ref, self.url))
+                              .format(self.source, self.ref, self.original_url))
 
     def latest_commit(self, tracking):
         _, output = self.source.check_output(
@@ -161,13 +190,14 @@ class GitMirror():
                          cwd=fullpath)
 
     def init_workspace(self, directory):
+        url = self.source.translate_url(self.original_url)
         fullpath = os.path.join(directory, self.path)
 
         self.source.call([self.source.host_git, 'clone', '--no-checkout', self.mirror, fullpath],
                          fail="Failed to clone git mirror {} in directory: {}".format(self.mirror, fullpath))
 
-        self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', self.url],
-                         fail='Failed to add remote origin "{}"'.format(self.url),
+        self.source.call([self.source.host_git, 'remote', 'set-url', 'origin', url],
+                         fail='Failed to add remote origin "{}"'.format(url),
                          cwd=fullpath)
 
         self.source.call([self.source.host_git, 'checkout', '--force', self.ref],
@@ -238,6 +268,13 @@ class GitMirror():
 
             return None
 
+    def get_alias(self):
+        if utils._ALIAS_SEPARATOR in self.original_url:
+            alias, _ = self.original_url.split(utils._ALIAS_SEPARATOR, 1)
+            return alias
+        else:
+            return None
+
 
 class GitSource(Source):
     # pylint: disable=attribute-defined-outside-init
@@ -308,41 +345,6 @@ class GitSource(Source):
     def set_ref(self, ref, node):
         node['ref'] = self.mirror.ref = ref
 
-    def track(self):
-
-        # If self.tracking is not specified it's not an error, just silently return
-        if not self.tracking:
-            return None
-
-        with self.timed_activity("Tracking {} from {}"
-                                 .format(self.tracking, self.mirror.url),
-                                 silent_nested=True):
-            self.mirror.ensure()
-            self.mirror.fetch()
-
-            # Update self.mirror.ref and node.ref from the self.tracking branch
-            ret = self.mirror.latest_commit(self.tracking)
-
-        return ret
-
-    def fetch(self):
-
-        with self.timed_activity("Fetching {}".format(self.mirror.url), silent_nested=True):
-
-            # Here we are only interested in ensuring that our mirror contains
-            # the self.mirror.ref commit.
-            self.mirror.ensure()
-            if not self.mirror.has_ref():
-                self.mirror.fetch()
-
-            self.mirror.assert_ref()
-
-            # Here after performing any fetches, we need to also ensure that
-            # we've cached the desired refs in our mirrors of submodules.
-            #
-            self.refresh_submodules()
-            self.fetch_submodules()
-
     def init_workspace(self, directory):
         # XXX: may wish to refactor this as some code dupe with stage()
         self.refresh_submodules()
@@ -363,7 +365,8 @@ class GitSource(Source):
 
         # Stage the main repo in the specified directory
         #
-        with self.timed_activity("Staging {}".format(self.mirror.url), silent_nested=True):
+        url = self.translate_url(self.mirror.original_url)
+        with self.timed_activity("Staging {}".format(url), silent_nested=True):
             self.mirror.stage(directory)
             for mirror in self.submodules:
                 if mirror.path in self.submodule_checkout_overrides:
@@ -374,6 +377,11 @@ class GitSource(Source):
                 if checkout:
                     mirror.stage(directory)
 
+    def get_source_downloaders(self, alias_override=None):
+        self.mirror.ensure(alias_override=alias_override)
+        self.refresh_submodules()
+        return [self.mirror] + self.submodules
+
     ###########################################################
     #                     Local Functions                     #
     ###########################################################
@@ -416,19 +424,6 @@ class GitSource(Source):
 
         self.submodules = submodules
 
-    # Ensures that we have mirrored git repositories for all
-    # the submodules existing at the given commit of the main git source.
-    #
-    # Also ensure that these mirrors have the required commits
-    # referred to at the given commit of the main git source.
-    #
-    def fetch_submodules(self):
-        for mirror in self.submodules:
-            mirror.ensure()
-            if not mirror.has_ref():
-                mirror.fetch()
-                mirror.assert_ref()
-
 
 # Plugin entry point
 def setup():