Posted to commits@buildstream.apache.org by no...@apache.org on 2020/12/29 12:55:56 UTC

[buildstream] branch jennis/rebase_fixups created (now 92dbd50)

This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a change to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git.


      at 92dbd50  fixups

This branch includes the following new commits:

     new 787c742  element.py: Promote __update_source_state to API private
     new f57d806  element.py: _fetch_done only updates source state
     new b0578e1  element.py: Remove _update_state from _schedule_tracking
     new 8072bf9  element.py: Split ready_for_runtime check into an API-private method
     new fb68057  CacheKey: Add a CacheKey class which delegates logic to varying implementations
     new 92dbd50  fixups

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[buildstream] 05/06: CacheKey: Add a CacheKey class which delegates logic to varying implementations

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit fb68057dd2f9bc460c4d23760e973ac5f75d7c6e
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Wed Apr 17 17:32:44 2019 +0100

    CacheKey: Add a CacheKey class which delegates logic to varying implementations
    
    i.e. a CacheKey base class, with implementations such as:
    * StrictCacheKey - a normal element when buildstream runs in Strict mode
    * NonStrictCacheKey - a normal element when buildstream runs in
      non-strict mode
    * StrictWorkspacedCacheKey - a workspaced element in Strict mode
    * NonStrictWorkspacedCacheKey - a workspaced element in non-strict mode
    
    (note: only CacheKey and StrictCacheKey are implemented for now)
    
    This involves:
    * Creating the CacheKey and StrictCacheKey classes
    * In Element instantiation, create a __cache_key_obj to delegate various
      logic to.
    * For the moment, extending various functions to call a
      __cache_key_obj's methods if they're implemented.
      * This includes extending the _KeyStrength enum to include a
        STRICT version, so strict cache keys can be accessed through a
        common interface.
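
To make the intended split concrete, here is a minimal, self-contained sketch of
the delegation the message describes. The class and method names mirror the diff
below; NotImplementedError stands in for BuildStream's ImplError, and the
cache_key_for() selector is purely hypothetical, since the real change
instantiates StrictCacheKey directly in Element.__init__ and only covers the
strict, non-workspaced case for now.

    from enum import Enum


    class _KeyStrength(Enum):      # mirrors buildstream/types.py after this commit
        STRONG = 1
        WEAK = 2
        STRICT = 3                 # new in this commit


    class CacheKey():
        # Base delegate: every cache key strategy must answer these calls.
        def __init__(self, element):
            self._element = element
            self._weak_key = None
            self._strict_key = None
            self._strong_key = None

        def calculate_keys(self):
            raise NotImplementedError("CacheKey does not implement calculate_keys()")

        def get_key(self, strength):
            raise NotImplementedError("CacheKey does not implement get_key()")


    class StrictCacheKey(CacheKey):
        # Strict mode, non-workspaced element: the strong key simply falls
        # back to the strict key once the weak and strict keys are known.
        def get_key(self, strength):
            if strength == _KeyStrength.WEAK:
                return self._weak_key
            elif strength == _KeyStrength.STRICT:
                return self._strict_key
            elif strength == _KeyStrength.STRONG:
                return self._strong_key
            else:
                raise AssertionError("Bad key strength value {}".format(strength))


    def cache_key_for(element, strict_mode, workspaced):
        # Hypothetical selector, for illustration only: the other three
        # variants named above do not exist yet.
        if strict_mode and not workspaced:
            return StrictCacheKey(element)
        return None    # Element keeps using _update_state() otherwise
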
---
 buildstream/_cachekey.py                |  68 --------------
 buildstream/_cachekey/__init__.py       |  22 +++++
 buildstream/_cachekey/cachekey.py       | 147 +++++++++++++++++++++++++++++
 buildstream/_cachekey/strictcachekey.py | 107 +++++++++++++++++++++
 buildstream/_loader/loader.py           |   8 +-
 buildstream/_pipeline.py                |  10 +-
 buildstream/element.py                  | 159 ++++++++++++++++++++++++++++----
 buildstream/types.py                    |   4 +
 8 files changed, 437 insertions(+), 88 deletions(-)

diff --git a/buildstream/_cachekey.py b/buildstream/_cachekey.py
deleted file mode 100644
index e56b582..0000000
--- a/buildstream/_cachekey.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#
-#  Copyright (C) 2018 Codethink Limited
-#
-#  This program is free software; you can redistribute it and/or
-#  modify it under the terms of the GNU Lesser General Public
-#  License as published by the Free Software Foundation; either
-#  version 2 of the License, or (at your option) any later version.
-#
-#  This library is distributed in the hope that it will be useful,
-#  but WITHOUT ANY WARRANTY; without even the implied warranty of
-#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
-#  Lesser General Public License for more details.
-#
-#  You should have received a copy of the GNU Lesser General Public
-#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
-#
-#  Authors:
-#        Tristan Van Berkom <tr...@codethink.co.uk>
-
-
-import hashlib
-
-import ujson
-
-from . import _yaml
-
-# Internal record of the size of a cache key
-_CACHEKEY_SIZE = len(hashlib.sha256().hexdigest())
-
-
-# Hex digits
-_HEX_DIGITS = "0123456789abcdef"
-
-
-# is_key()
-#
-# Check if the passed in string *could be* a cache key.  This basically checks
-# that the length matches a sha256 hex digest, and that the string does not
-# contain any non-hex characters and is fully lower case.
-#
-# Args:
-#    key (str): The string to check
-#
-# Returns:
-#    (bool): Whether or not `key` could be a cache key
-#
-def is_key(key):
-    if len(key) != _CACHEKEY_SIZE:
-        return False
-    return not any(ch not in _HEX_DIGITS for ch in key)
-
-
-# generate_key()
-#
-# Generate an sha256 hex digest from the given value. The value
-# can be a simple value or recursive dictionary with lists etc,
-# anything simple enough to serialize.
-#
-# Args:
-#    value: A value to get a key for
-#
-# Returns:
-#    (str): An sha256 hex digest of the given value
-#
-def generate_key(value):
-    ordered = _yaml.node_sanitize(value)
-    ustring = ujson.dumps(ordered, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
-    return hashlib.sha256(ustring).hexdigest()
diff --git a/buildstream/_cachekey/__init__.py b/buildstream/_cachekey/__init__.py
new file mode 100644
index 0000000..17bab8a
--- /dev/null
+++ b/buildstream/_cachekey/__init__.py
@@ -0,0 +1,22 @@
+#
+#  Copyright (C) 2019 Bloomberg Finance LP
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2 of the License, or (at your option) any later version.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+#  Authors:
+#        Jonathan Maw <jo...@codethink.co.uk>
+
+
+from .cachekey import generate_key, is_key
+from .strictcachekey import StrictCacheKey
diff --git a/buildstream/_cachekey/cachekey.py b/buildstream/_cachekey/cachekey.py
new file mode 100644
index 0000000..d3ae190
--- /dev/null
+++ b/buildstream/_cachekey/cachekey.py
@@ -0,0 +1,147 @@
+#
+#  Copyright (C) 2018 Codethink Limited
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2 of the License, or (at your option) any later version.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+#  Authors:
+#        Tristan Van Berkom <tr...@codethink.co.uk>
+
+
+import hashlib
+
+import ujson
+
+from .. import _yaml
+from .._exceptions import ImplError
+from ..types import _KeyStrength, Scope
+
+# Internal record of the size of a cache key
+_CACHEKEY_SIZE = len(hashlib.sha256().hexdigest())
+
+
+# Hex digits
+_HEX_DIGITS = "0123456789abcdef"
+
+
+# TODO: DOCSTRINGS
+class CacheKey():
+    def __init__(self, element):
+        self._element = element
+        self._weak_key = None
+        self._strict_key = None
+        self._strong_key = None
+        self._weak_cached = None
+        # TODO: Understand why there's no __strict_cached
+        self._strong_cached = None
+
+    # ABSTRACT METHODS
+    def calculate_keys(self):
+        raise ImplError("CacheKey does not implement calculate_keys()")
+
+    def get_key(self, strength):
+        raise ImplError("CacheKey does not implement get_key()")
+
+    def maybe_schedule_assemble(self):
+        raise ImplError("CacheKey does not implement maybe_schedule_assemble()")
+
+    def is_cached(self, strength):
+        raise ImplError("CacheKey does not implement is_cached()")
+
+    def tracking_done(self):
+        raise ImplError("CacheKey does not implement tracking_done()")
+
+    def pull_done(self):
+        raise ImplError("CacheKey does not implement pull_done()")
+
+    def assemble_done(self):
+        raise ImplError("CacheKey does not implement assemble_done()")
+
+    # PRIVATE METHODS
+
+    def _update_weak_cached(self):
+        if self._weak_key and not self._weak_cached:
+            self._weak_cached = self._element._is_key_cached(self._weak_key)
+
+    def _update_strong_cached(self):
+        if self._strict_key and not self._strong_cached:
+            self._strong_cached = self._element._is_key_cached(self._strict_key)
+
+    # Set the weak key
+    def _calculate_weak_key(self):
+        if self._weak_key is None:
+            if self._element.BST_STRICT_REBUILD:
+                deps = [e._get_cache_key(strength=_KeyStrength.WEAK)
+                        for e in self._element.dependencies(Scope.BUILD)]
+            else:
+                deps = [e.name for e in self._element.dependencies(Scope.BUILD, recurse=False)]
+
+            # XXX: Perhaps it would be better to move all cache key calculation
+            #      into CacheKey, and have Element use a function to generate
+            #      the cache_key_dict. Generate, rather than store internally,
+            #      because workspaces could have a different cache_key_dict after
+            #      building.
+            self._weak_key = self._element._calculate_cache_key(deps)
+
+        if self._weak_key is None:
+            return False
+
+        return True
+
+    # Set the strict key
+    def _calculate_strict_key(self):
+        if self._strict_key is None:
+            deps = [e._get_cache_key(strength=_KeyStrength.STRICT)
+                    for e in self._element.dependencies(Scope.BUILD)]
+            self._strict_key = self._element._calculate_cache_key(deps)
+
+        if self._strict_key is None:
+            return False
+
+        return True
+
+
+# is_key()
+#
+# Check if the passed in string *could be* a cache key.  This basically checks
+# that the length matches a sha256 hex digest, and that the string does not
+# contain any non-hex characters and is fully lower case.
+#
+# Args:
+#    key (str): The string to check
+#
+# Returns:
+#    (bool): Whether or not `key` could be a cache key
+#
+def is_key(key):
+    if len(key) != _CACHEKEY_SIZE:
+        return False
+    return not any(ch not in _HEX_DIGITS for ch in key)
+
+
+# generate_key()
+#
+# Generate an sha256 hex digest from the given value. The value
+# can be a simple value or recursive dictionary with lists etc,
+# anything simple enough to serialize.
+#
+# Args:
+#    value: A value to get a key for
+#
+# Returns:
+#    (str): An sha256 hex digest of the given value
+#
+def generate_key(value):
+    ordered = _yaml.node_sanitize(value)
+    ustring = ujson.dumps(ordered, sort_keys=True, escape_forward_slashes=False).encode('utf-8')
+    return hashlib.sha256(ustring).hexdigest()
diff --git a/buildstream/_cachekey/strictcachekey.py b/buildstream/_cachekey/strictcachekey.py
new file mode 100644
index 0000000..e87fb54
--- /dev/null
+++ b/buildstream/_cachekey/strictcachekey.py
@@ -0,0 +1,107 @@
+#
+#  Copyright (C) 2019 Bloomberg Finance LP
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2 of the License, or (at your option) any later version.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+#  Authors:
+#        Jonathan Maw <jo...@codethink.co.uk>
+
+
+from .cachekey import CacheKey
+from ..types import _KeyStrength, Consistency
+
+
+# TODO: DOCSTRINGS
+class StrictCacheKey(CacheKey):
+    def calculate_keys(self):
+        if self._element._get_consistency() == Consistency.INCONSISTENT:
+            return
+
+        if not self._calculate_weak_key():
+            # Failure when calculating weak key
+            # This usually happens when the element is BST_STRICT_REBUILD, and
+            # its dependency is an uncached workspace, or pending track.
+            return
+
+        if not self._calculate_strict_key():
+            # Failure when calculating strict key
+            # Usually because a dependency is pending track or is workspaced
+            # and not cached
+            return
+
+        self._update_strong_cached()
+
+        # TODO: Figure out why _weak_cached is only set if it's identical
+        # NOTE: Elements with no dependencies have identical strict and weak keys.
+        if self._strict_key == self._weak_key:
+            self._update_weak_cached()
+
+        if self._strong_key is None:
+            self._strong_key = self._strict_key
+
+        self._element._check_ready_for_runtime()
+
+    def get_key(self, strength):
+        # NOTE: KeyStrength numbers are not sequential
+        if strength == _KeyStrength.WEAK:
+            return self._weak_key
+        elif strength == _KeyStrength.STRICT:
+            return self._strict_key
+        elif strength == _KeyStrength.STRONG:
+            return self._strong_key
+        else:
+            raise AssertionError("Bad key strength value {}".format(strength))
+
+    def maybe_schedule_assemble(self):
+        # XXX: Should _cached_success take a _KeyStrength?
+        if (self._weak_key and self._strong_key and
+                self._element._is_pending_assembly() and
+                self._element._is_required() and
+                not self._element._cached_success() and
+                not self._element._pull_pending()):
+            self._element._schedule_assemble()
+
+    def is_cached(self, strength):
+        if strength == _KeyStrength.STRONG:
+            return self._strong_cached
+        elif strength == _KeyStrength.STRICT:
+            # TODO: Understand difference between strict cached and strong cached
+            raise AssertionError("I have no idea why it's strong_cached and not strict_cached")
+        elif strength == _KeyStrength.WEAK:
+            return self._weak_cached
+        else:
+            raise AssertionError("Bad key strength value {}".format(strength))
+
+    def tracking_done(self):
+        # this generator includes this corresponding element
+        for element in self._element._reverse_deps_for_update():
+            element._calculate_keys()
+            element._maybe_schedule_assemble()
+
+    def pull_done(self):
+        # Cache keys are already known before this.
+        # Element may become cached.
+        self._update_strong_cached()
+        if self._weak_key == self._strict_key:
+            self._update_weak_cached()
+
+        # If it failed to pull, it should assemble.
+        self._element._maybe_schedule_assemble()
+
+    def assemble_done(self):
+        # Cache keys are already known before this.
+        # Element may become cached.
+        self._update_strong_cached()
+        if self._weak_key == self._strict_key:
+            self._update_weak_cached()
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 6d8310c..c6a51e6 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -552,7 +552,13 @@ class Loader():
             basedir = sources[0]._get_local_path()
         else:
             # Stage sources
-            element._update_state()
+            # TODO: Remove conditional once implemented wholly
+            if element._Element__cache_key_obj:
+                element._update_source_state()
+                element._calculate_keys()
+            else:
+                element._update_state()
+
             basedir = os.path.join(self.project.directory, ".bst", "staged-junctions",
                                    filename, element._get_cache_key())
             if not os.path.exists(basedir):
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index c176b82..99cbe9e 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -136,7 +136,15 @@ class Pipeline():
                 element._preflight()
 
                 # Determine initial element state.
-                element._update_state()
+                if element._Element__cache_key_obj:
+                    # Ensure consistency of sources
+                    element._update_source_state()
+
+                    element._calculate_keys()
+                    # TODO: maybe schedule assembly if workspaced
+
+                else:
+                    element._update_state()
 
     # dependencies()
     #
diff --git a/buildstream/element.py b/buildstream/element.py
index 8cee859..bd9ccbd 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -95,6 +95,7 @@ from ._exceptions import BstError, LoadError, LoadErrorReason, ImplError, \
 from .utils import UtilError
 from . import utils
 from . import _cachekey
+from ._cachekey import StrictCacheKey
 from . import _signals
 from . import _site
 from ._platform import Platform
@@ -189,9 +190,15 @@ class Element(Plugin):
 
         self.__cache_key_dict = None            # Dict for cache key calculation
         self.__cache_key = None                 # Our cached cache key
+        self.__cache_key_obj = None             # Object for handling cache keys
 
         super().__init__(meta.name, context, project, meta.provenance, "element")
 
+        # TODO: Give a proper name when we've moved the cache keys completely out of element
+        # TODO: Cache the result of _get_workspace
+        if context.get_strict() and not self._get_workspace():
+            self.__cache_key_obj = StrictCacheKey(self)
+
         self.__is_junction = meta.kind == "junction"
 
         if not self.__is_junction:
@@ -1131,10 +1138,16 @@ class Element(Plugin):
     # None is returned if information for the cache key is missing.
     #
     def _get_cache_key(self, strength=_KeyStrength.STRONG):
-        if strength == _KeyStrength.STRONG:
-            return self.__cache_key
+        # TODO: Remove conditional once all implementations are added
+        if self.__cache_key_obj:
+            return self.__cache_key_obj.get_key(strength)
         else:
-            return self.__weak_cache_key
+            if strength == _KeyStrength.STRONG:
+                return self.__cache_key
+            elif strength == _KeyStrength.STRICT:
+                return self.__strict_cache_key
+            else:
+                return self.__weak_cache_key
 
     # _can_query_cache():
     #
@@ -1153,7 +1166,7 @@ class Element(Plugin):
             return True
 
         # cache cannot be queried until strict cache key is available
-        return self.__strict_cache_key is not None
+        return self._get_cache_key(_KeyStrength.STRICT) is not None
 
     # _update_state()
     #
@@ -1231,7 +1244,7 @@ class Element(Plugin):
 
         if self.__strict_cache_key is None:
             dependencies = [
-                e.__strict_cache_key for e in self.dependencies(Scope.BUILD)
+                e._get_cache_key(_KeyStrength.STRICT) for e in self.dependencies(Scope.BUILD)
             ]
             self.__strict_cache_key = self._calculate_cache_key(dependencies)
 
@@ -1314,7 +1327,7 @@ class Element(Plugin):
 
         if not cache_key:
             cache_key = "{:?<64}".format('')
-        elif self._get_cache_key() == self.__strict_cache_key:
+        elif self._get_cache_key() == self._get_cache_key(_KeyStrength.STRICT):
             # Strong cache key used in this session matches cache key
             # that would be used in strict build mode
             dim_key = False
@@ -1402,7 +1415,12 @@ class Element(Plugin):
         self.__tracking_scheduled = False
         self.__tracking_done = True
 
-        self.__update_state_recursively()
+        # TODO: Remove this conditional once implementations are in place
+        if self.__cache_key_obj:
+            self._update_source_state()
+            self.__cache_key_obj.tracking_done()
+        else:
+            self.__update_state_recursively()
 
     # _track():
     #
@@ -1558,14 +1576,17 @@ class Element(Plugin):
         if self.__required:
             # Already done
             return
-
         self.__required = True
 
         # Request artifacts of runtime dependencies
         for dep in self.dependencies(Scope.RUN, recurse=False):
             dep._set_required()
 
-        self._update_state()
+        # TODO: Remove conditional once all implementations are done
+        if self.__cache_key_obj:
+            self.__cache_key_obj.maybe_schedule_assemble()
+        else:
+            self._update_state()
 
     # _is_required():
     #
@@ -1617,7 +1638,13 @@ class Element(Plugin):
         if workspace:
             workspace.invalidate_key()
 
-        self._update_state()
+        # NOTE: Ideally, we won't need to do any state handling here
+        # Currently needed for:
+        # * This is the first time that an uncached, non-strict, can't pull,
+        #    element can determine it's strong cache key
+        # * For workspaces, just set ~everything to None (look at update state now)
+        if not self.__cache_key_obj:
+            self._update_state()
 
     # _assemble_done():
     #
@@ -1632,7 +1659,11 @@ class Element(Plugin):
         self.__assemble_scheduled = False
         self.__assemble_done = True
 
-        self.__update_state_recursively()
+        # TODO: Remove conditional once implementations are done
+        if self.__cache_key_obj:
+            self.__cache_key_obj.assemble_done()
+        else:
+            self.__update_state_recursively()
 
         if self._get_workspace() and self._cached_success():
             assert utils._is_main_process(), \
@@ -1857,11 +1888,12 @@ class Element(Plugin):
         # in user context, as to complete a partial artifact
         subdir, _ = self.__pull_directories()
 
-        if self.__strong_cached and subdir:
+        strong_cached = self.__is_cached(_KeyStrength.STRONG)
+        if strong_cached and subdir:
             # If we've specified a subdir, check if the subdir is cached locally
-            if self.__artifacts.contains_subdir_artifact(self, self.__strict_cache_key, subdir):
+            if self.__artifacts.contains_subdir_artifact(self, self._get_cache_key(_KeyStrength.STRICT), subdir):
                 return False
-        elif self.__strong_cached:
+        elif strong_cached:
             return False
 
         # Pull is pending if artifact remote server available
@@ -1881,7 +1913,11 @@ class Element(Plugin):
     def _pull_done(self):
         self.__pull_done = True
 
-        self.__update_state_recursively()
+        # TODO: Remove conditional when all implementations are done
+        if self.__cache_key_obj:
+            self.__cache_key_obj.pull_done()
+        else:
+            self.__update_state_recursively()
 
     # _pull():
     #
@@ -2314,6 +2350,20 @@ class Element(Plugin):
                 source._update_state()
                 self.__consistency = min(self.__consistency, source._get_consistency())
 
+    # _calculate_keys():
+    #
+    # TODO: DOCSTRING
+    #
+    def _calculate_keys(self):
+        return self.__cache_key_obj.calculate_keys()
+
+    # _maybe_schedule_assemble():
+    #
+    # TODO: DOCSTRING
+    #
+    def _maybe_schedule_assemble(self):
+        self.__cache_key_obj.maybe_schedule_assemble()
+
     # _check_ready_for_runtime():
     #
     # TODO: DOCSTRING
@@ -2323,6 +2373,43 @@ class Element(Plugin):
             self.__ready_for_runtime = all(
                 dep.__ready_for_runtime for dep in self.__runtime_dependencies)
 
+    # _is_key_cached():
+    #
+    # TODO: DOCSTRING
+    #
+    def _is_key_cached(self, key):
+        assert key
+        return self.__artifact.cached(key)
+
+    # _is_pending_assembly():
+    #
+    # TODO: DOCSTRING
+    #
+    def _is_pending_assembly(self):
+        return not self.__assemble_scheduled and not self.__assemble_done
+
+    # _reverse_deps_cachekeys_for_update()
+    #
+    # Yield every reverse dependency that's not ready for runtime
+    #
+    def _reverse_deps_for_update(self):
+        # XXX: Would this be nicer if the CacheKey owned __ready_for_runtime?
+        queue = _UniquePriorityQueue()
+        queue.push(self._unique_id, self)
+
+        while queue:
+            element = queue.pop()
+
+            # If ready, it never becomes unready
+            if element.__ready_for_runtime:
+                continue
+
+            yield element
+
+            # Element readiness changed, maybe rdeps will, too
+            if element.__ready_for_runtime:
+                for rdep in element.__reverse_dependencies:
+                    queue.push(rdep._unique_id, rdep)
 
     #############################################################
     #                   Private Local Methods                   #
@@ -2374,7 +2461,10 @@ class Element(Plugin):
         if keystrength is None:
             keystrength = _KeyStrength.STRONG if self._get_context().get_strict() else _KeyStrength.WEAK
 
-        return self.__strong_cached if keystrength == _KeyStrength.STRONG else self.__weak_cached
+        if self.__cache_key_obj:
+            return self.__cache_key_obj.is_cached(keystrength)
+        else:
+            return self.__strong_cached if keystrength == _KeyStrength.STRONG else self.__weak_cached
 
     # __assert_cached()
     #
@@ -2828,6 +2918,33 @@ class Element(Plugin):
 
         self.__build_result = self.__artifact.load_build_result()
 
+    # TODO: Do we need any of the key logic we introduced?
+    #     # _get_cache_key with _KeyStrength.STRONG returns self.__cache_key, which can be `None`
+    #     # leading to a failed assertion from get_artifact_directory() using get_artifact_name(),
+    #     # so explicitly pass the strict cache key
+    #     if keystrength == _KeyStrength.WEAK:
+    #         key = self._get_cache_key(_KeyStrength.WEAK)
+    #     else:
+    #         key = self._get_cache_key(_KeyStrength.STRICT)
+
+    #     self.__build_result = self.__artifact.load_build_result(key)
+
+    # def __get_build_result(self, keystrength):
+    #     if keystrength is None:
+    #         keystrength = _KeyStrength.STRONG if self._get_context().get_strict() else _KeyStrength.WEAK
+
+    #     if self.__build_result is None:
+    #         self.__load_build_result(keystrength)
+
+    #     return self.__build_result
+
+    # def __cached_success(self, keystrength):
+    #     if not self.__is_cached(keystrength=keystrength):
+    #         return False
+
+    #     success, _, _ = self.__get_build_result(keystrength=keystrength)
+    #     return success
+
     def __get_cache_keys_for_commit(self):
         keys = []
 
@@ -2854,7 +2971,7 @@ class Element(Plugin):
     #
     def __pull_strong(self, *, progress=None, subdir=None, excluded_subdirs=None):
         weak_key = self._get_cache_key(strength=_KeyStrength.WEAK)
-        key = self.__strict_cache_key
+        key = self._get_cache_key(strength=_KeyStrength.STRICT)
         if not self.__artifacts.pull(self, key, progress=progress, subdir=subdir,
                                      excluded_subdirs=excluded_subdirs):
             return False
@@ -2944,7 +3061,13 @@ class Element(Plugin):
             element = queue.pop()
 
             old_ready_for_runtime = element.__ready_for_runtime
-            element._update_state()
+            # TODO: Replace this once all cases are implemented
+            if element.__cache_key_obj:
+                element._update_source_state()
+                element.__cache_key_obj.calculate_keys()
+                element.__cache_key_obj.maybe_schedule_assemble()
+            else:
+                element._update_state()
 
             if element.__ready_for_runtime != old_ready_for_runtime:
                 for rdep in element.__reverse_dependencies:
diff --git a/buildstream/types.py b/buildstream/types.py
index d54bf0b..2e37c2f 100644
--- a/buildstream/types.py
+++ b/buildstream/types.py
@@ -127,6 +127,10 @@ class _KeyStrength(Enum):
     # cache keys of dependencies.
     WEAK = 2
 
+    # Includes strict cache keys of all build dependencies and their
+    # runtime dependencies.
+    STRICT = 3
+
 
 # _UniquePriorityQueue():
 #
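
The module-level helpers kept at the bottom of cachekey.py behave as before.
Roughly, they work like the following self-contained sketch; the standard-library
json module stands in for ujson and _yaml.node_sanitize() here, so the exact
digests differ from BuildStream's, but the shape of the key is the same.

    import hashlib
    import json

    _CACHEKEY_SIZE = len(hashlib.sha256().hexdigest())    # 64
    _HEX_DIGITS = "0123456789abcdef"

    def generate_key(value):
        # Serialise deterministically, then hash.
        ustring = json.dumps(value, sort_keys=True).encode('utf-8')
        return hashlib.sha256(ustring).hexdigest()

    def is_key(key):
        # A cache key is a 64-character, lower-case sha256 hex digest.
        if len(key) != _CACHEKEY_SIZE:
            return False
        return not any(ch not in _HEX_DIGITS for ch in key)

    key = generate_key({'kind': 'autotools', 'deps': ['base.bst']})
    assert is_key(key)
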


[buildstream] 04/06: element.py: Split ready_for_runtime check into an API-private method

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 8072bf9f34babde6c300c8d7671bb08bff859d69
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Wed Apr 17 17:18:06 2019 +0100

    element.py: Split ready_for_runtime check into an API-private method
    
    This is done because an external caller (the CacheKey delegate introduced
    in this series) will need to call it in certain circumstances.
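
For context, the external caller in question is the CacheKey delegate of commit
05/06, shown earlier in this archive; once the delegate has finished calculating
keys it asks the element to re-evaluate its runtime readiness. A minimal sketch
of that call pattern, with both classes reduced to the relevant lines:

    class Element():
        def _check_ready_for_runtime(self):
            # Resolve __ready_for_runtime once a cache key is available
            pass

    class StrictCacheKey():
        def __init__(self, element):
            self._element = element

        def calculate_keys(self):
            # ... compute weak/strict/strong keys ...
            # A single leading underscore makes this reachable from outside
            # the Element class; a name-mangled __ method would not be.
            self._element._check_ready_for_runtime()

    StrictCacheKey(Element()).calculate_keys()
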
---
 buildstream/element.py | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/buildstream/element.py b/buildstream/element.py
index 357ce97..8cee859 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1293,9 +1293,7 @@ class Element(Plugin):
             # Now we have the strong cache key, update the Artifact
             self.__artifact._cache_key = self.__cache_key
 
-        if not self.__ready_for_runtime and self.__cache_key is not None:
-            self.__ready_for_runtime = all(
-                dep.__ready_for_runtime for dep in self.__runtime_dependencies)
+        self._check_ready_for_runtime()
 
     # _get_display_key():
     #
@@ -2316,6 +2314,15 @@ class Element(Plugin):
                 source._update_state()
                 self.__consistency = min(self.__consistency, source._get_consistency())
 
+    # _check_ready_for_runtime():
+    #
+    # TODO: DOCSTRING
+    #
+    def _check_ready_for_runtime(self):
+        if not self.__ready_for_runtime and self._get_cache_key() is not None:
+            self.__ready_for_runtime = all(
+                dep.__ready_for_runtime for dep in self.__runtime_dependencies)
+
 
     #############################################################
     #                   Private Local Methods                   #


[buildstream] 06/06: fixups

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 92dbd508c6d538aebdc430210f0b5883a323a8ce
Author: James Ennis <ja...@codethink.co.uk>
AuthorDate: Thu Apr 18 13:01:06 2019 +0100

    fixups
---
 buildstream/_cachekey/cachekey.py       |  4 ++--
 buildstream/_cachekey/strictcachekey.py |  9 ++++++---
 buildstream/element.py                  | 20 ++++++++++++++++----
 3 files changed, 24 insertions(+), 9 deletions(-)

diff --git a/buildstream/_cachekey/cachekey.py b/buildstream/_cachekey/cachekey.py
index d3ae190..15b0df0 100644
--- a/buildstream/_cachekey/cachekey.py
+++ b/buildstream/_cachekey/cachekey.py
@@ -71,11 +71,11 @@ class CacheKey():
 
     def _update_weak_cached(self):
         if self._weak_key and not self._weak_cached:
-            self._weak_cached = self._element._is_key_cached(self._weak_key)
+            self._weak_cached = self._element._is_weak_cached()
 
     def _update_strong_cached(self):
         if self._strict_key and not self._strong_cached:
-            self._strong_cached = self._element._is_key_cached(self._strict_key)
+            self._strong_cached = self._element._is_strong_cached()
 
     # Set the weak key
     def _calculate_weak_key(self):
diff --git a/buildstream/_cachekey/strictcachekey.py b/buildstream/_cachekey/strictcachekey.py
index e87fb54..e984886 100644
--- a/buildstream/_cachekey/strictcachekey.py
+++ b/buildstream/_cachekey/strictcachekey.py
@@ -40,6 +40,12 @@ class StrictCacheKey(CacheKey):
             # and not cached
             return
 
+        # Assemble the strict artifact
+        self._element._assemble_strict_artifact()
+
+        if self._strong_key is None:
+            self._strong_key = self._strict_key
+
         self._update_strong_cached()
 
         # TODO: Figure out why _weak_cached is only set if it's identical
@@ -47,9 +53,6 @@ class StrictCacheKey(CacheKey):
         if self._strict_key == self._weak_key:
             self._update_weak_cached()
 
-        if self._strong_key is None:
-            self._strong_key = self._strict_key
-
         self._element._check_ready_for_runtime()
 
     def get_key(self, strength):
diff --git a/buildstream/element.py b/buildstream/element.py
index bd9ccbd..9441102 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1308,6 +1308,15 @@ class Element(Plugin):
 
         self._check_ready_for_runtime()
 
+    def _assemble_strict_artifact(self):
+        context = self._get_context()
+        strict = self._get_cache_key(strength=_KeyStrength.STRICT)
+        weak = self._get_cache_key(strength=_KeyStrength.WEAK)
+        self.__strict_artifact = Artifact(self, context, strong_key=strict,
+                                          weak_key=weak)
+        self.__artifact = self.__strict_artifact
+
+
     # _get_display_key():
     #
     # Returns cache keys for display purposes
@@ -2373,13 +2382,16 @@ class Element(Plugin):
             self.__ready_for_runtime = all(
                 dep.__ready_for_runtime for dep in self.__runtime_dependencies)
 
-    # _is_key_cached():
+    # _is_strong_cached():
     #
     # TODO: DOCSTRING
     #
-    def _is_key_cached(self, key):
-        assert key
-        return self.__artifact.cached(key)
+    def _is_strong_cached(self):
+        return self.__strict_artifact.cached()
+
+    def _is_weak_cached(self):
+        return self.__artifact.cached()
+
 
     # _is_pending_assembly():
     #


[buildstream] 03/06: element.py: Remove _update_state from _schedule_tracking

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit b0578e1caf3d3259c9b2305b454db1ee587b69f0
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Wed Apr 17 17:13:51 2019 +0100

    element.py: Remove _update_state from _schedule_tracking
    
    Nothing can happen at this point: source state cannot be resolved while
    tracking is still scheduled, so there is nothing for _update_state() to do.
---
 buildstream/element.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/buildstream/element.py b/buildstream/element.py
index bdedc50..357ce97 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1393,7 +1393,6 @@ class Element(Plugin):
     #
     def _schedule_tracking(self):
         self.__tracking_scheduled = True
-        self._update_state()
 
     # _tracking_done():
     #


[buildstream] 01/06: element.py: Promote __update_source_state to API private

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 787c742457ddabb663eb4befe845170faf4a8ced
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Wed Apr 17 16:18:53 2019 +0100

    element.py: Promote __update_source_state to API private
    
    There are now times when we want to call _update_source_state outside of
    Element.
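
The rename matters because of Python name mangling: a double-underscore method is
only reachable from outside the class under its mangled name, whereas a
single-underscore, API-private method can be called directly by other BuildStream
internals. A small, self-contained illustration of the mechanism (both spellings
shown side by side purely for comparison):

    class Element():
        def __update_source_state(self):       # mangled to _Element__update_source_state
            print("private to Element")

        def _update_source_state(self):        # API private: other internals may call it
            print("callable from outside")

    e = Element()
    e._update_source_state()                   # fine
    e._Element__update_source_state()          # only reachable via the mangled name
    # e.__update_source_state()                # AttributeError

This is also why the Loader and Pipeline diffs in commit 05/06 above have to reach
the new __cache_key_obj attribute through its mangled name,
element._Element__cache_key_obj.
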
---
 buildstream/element.py | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/buildstream/element.py b/buildstream/element.py
index 95081b9..5f4215e 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1166,7 +1166,7 @@ class Element(Plugin):
         context = self._get_context()
 
         # Compute and determine consistency of sources
-        self.__update_source_state()
+        self._update_source_state()
 
         if self._get_consistency() == Consistency.INCONSISTENT:
             # Tracking may still be pending
@@ -2283,15 +2283,11 @@ class Element(Plugin):
         else:
             return True
 
-    #############################################################
-    #                   Private Local Methods                   #
-    #############################################################
-
-    # __update_source_state()
+    # _update_source_state()
     #
     # Updates source consistency state
     #
-    def __update_source_state(self):
+    def _update_source_state(self):
 
         # Cannot resolve source state until tracked
         if self.__tracking_scheduled:
@@ -2316,6 +2312,11 @@ class Element(Plugin):
                 source._update_state()
                 self.__consistency = min(self.__consistency, source._get_consistency())
 
+
+    #############################################################
+    #                   Private Local Methods                   #
+    #############################################################
+
     # __can_build_incrementally()
     #
     # Check if the element can be built incrementally, this


[buildstream] 02/06: element.py: _fetch_done only updates source state

Posted by no...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

not-in-ldap pushed a commit to branch jennis/rebase_fixups
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit f57d8066b81fd17c4db516d61822ac5e63d5e6ab
Author: Jonathan Maw <jo...@codethink.co.uk>
AuthorDate: Wed Apr 17 17:12:50 2019 +0100

    element.py: _fetch_done only updates source state
---
 buildstream/element.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/buildstream/element.py b/buildstream/element.py
index 5f4215e..bdedc50 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -1835,7 +1835,12 @@ class Element(Plugin):
     def _fetch_done(self):
         # We are not updating the state recursively here since fetching can
         # never end up in updating them.
-        self._update_state()
+
+        # Fetching changes the source state from RESOLVED to CACHED
+        # Fetching cannot change the source state from INCONSISTENT to CACHED because
+        # we prevent fetching when it's INCONSISTENT.
+        # Therefore, only the source state will change.
+        self._update_source_state()
 
     # _pull_pending()
     #