You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@buildstream.apache.org by gi...@apache.org on 2020/12/29 13:13:45 UTC

[buildstream] branch valentindavid/flatpak-demo created (now 6716a56)

This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a change to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git.


      at 6716a56  Inject environment to bwrap through its command line

This branch includes the following new commits:

     new 2eaff21  Move tests.frontend.generate_junction to test.testutils
     new 0793cb4  Pass targets to Loader.load() instead of its constructor.
     new ea7bb36  Make Project owner of Loader.
     new 7305ae9  Extract plugin collection
     new 292766d  Add support for include '(@)' in project.conf and .bst files
     new d50ebc6  Add documentation for include directive.
     new 420054b  Rename BST_NO_PROJECT_DEFAULTS to (not) BST_PROJECT_INCLUDES_PROCESSED
     new f3ed208  buildstream/_options/optionpool.py: Fix parameters of OptionPool.load_cli_values
     new d97b2c1  buildstream/_project.py: Document Project._load_pass.
     new c31e5a1  buildstream/_includes.py: Cache loaded fragments.
     new f06c2ce  buildstream/_includes.py: Remove validation of fragments.
     new 795df4e  Forbid inline tracking on fragments coming from junctions
     new 7c4a1fe  tests/frontend/track.py: Add tests for tracking on included fragments.
     new da42c7d  tests/format/include.py: Test use of conditionals in included fragment.
     new f221dde  Drop BST_PROJECT_INCLUDES_PROCESSED and use kind to detect junctions.
     new 63aecde  Move loading and cleaning of elements from Pipeline to Project.
     new 195b580  Adding missing test data.
     new 4789c0f  Detect recursive includes.
     new 37b1a77  doc/source/formatintro.rst: Use references where possible in Include section.
     new 7eccd56  buildstream/element.py: Convert an exception to an assert.
     new ddeb066  Fix issue with including in sub-nodes.
     new 5b64442  Add missing files
     new de2e7fe  Make include path relative to project the including fragment is found.
     new 1970bb1  Fix indentation
     new 356a634  Fix 'first pass config' behavior for loading elements
     new 9df849d  Delay full load of project for tracking
     new 52b6b40  Always process local includes.
     new e3756d4  Inverse priority of in include composition
     new 6028168  Give less priority to projectdata.yml than includes
     new 6716a56  Inject environment to bwrap through its command line

The 30 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[buildstream] 26/30: Delay full load of project for tracking

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 9df849dfa282e124b876e1ba869023f1eecd1397
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jul 2 11:55:15 2018 +0200

    Delay full load of project for tracking
---
 buildstream/_artifactcache/artifactcache.py |  1 +
 buildstream/_frontend/widget.py             |  7 ++-
 buildstream/_loader/loader.py               |  5 +-
 buildstream/_project.py                     | 80 +++++++++++++++++------------
 buildstream/element.py                      | 12 ++---
 tests/artifactcache/config.py               |  1 +
 tests/format/include/recursive/element.bst  |  1 +
 7 files changed, 62 insertions(+), 45 deletions(-)

diff --git a/buildstream/_artifactcache/artifactcache.py b/buildstream/_artifactcache/artifactcache.py
index 2d745f8..8e51a64 100644
--- a/buildstream/_artifactcache/artifactcache.py
+++ b/buildstream/_artifactcache/artifactcache.py
@@ -125,6 +125,7 @@ class ArtifactCache():
             has_remote_caches = True
         if use_config:
             for project in self.context.get_projects():
+                project.ensure_fully_loaded()
                 artifact_caches = _configured_remote_artifact_cache_specs(self.context, project)
                 if artifact_caches:  # artifact_caches is a list of ArtifactCacheSpec instances
                     self._set_remotes(artifact_caches, project=project)
diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index cad1a28..61fd6cd 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -480,8 +480,11 @@ class LogLine(Widget):
             text += '\n'
 
         # Plugins
-        text += self._format_plugins(project.plugins._element_factory.loaded_dependencies,
-                                     project.plugins._source_factory.loaded_dependencies)
+        text += self._format_plugins(project.first_pass_config.plugins._element_factory.loaded_dependencies,
+                                     project.first_pass_config.plugins._source_factory.loaded_dependencies)
+        if project.plugins:
+            text += self._format_plugins(project.plugins._element_factory.loaded_dependencies,
+                                         project.plugins._source_factory.loaded_dependencies)
 
         # Pipeline state
         text += self.content_profile.fmt("Pipeline\n", bold=True)
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index bc43180..4a76b03 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -249,10 +249,7 @@ class Loader():
         if kind == "junction":
             self._first_pass_options.process_node(node)
         else:
-            if not self.project.is_loaded():
-                raise LoadError(LoadErrorReason.INVALID_DATA,
-                                "{}: Cannot pre-load. Element depends on project defaults."
-                                .format(filename))
+            self.project.ensure_fully_loaded()
 
             self._includes.process(node)
 
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 2b9d637..5156539 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -270,13 +270,16 @@ class Project():
 
         self._context.add_project(self)
 
-        self._loaded = False
+        self._partially_loaded = False
+        self._fully_loaded = False
+        self._project_includes = None
+        self._config_node = None
 
         profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
         self._load(parent_loader=parent_loader, tempdir=tempdir)
         profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
 
-        self._loaded = True
+        self._partially_loaded = True
 
     @property
     def plugins(self):
@@ -298,8 +301,6 @@ class Project():
     def source_overrides(self):
         return self.config.source_overrides
 
-    def is_loaded(self):
-        return self._loaded
 
     # translate_url():
     #
@@ -415,7 +416,7 @@ class Project():
 
         # Load builtin default
         projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
-        config = _yaml.load(_site.default_project_config)
+        self._config_node = _yaml.load(_site.default_project_config)
 
         # Load project local config and override the builtin
         try:
@@ -424,10 +425,10 @@ class Project():
             # Raise a more specific error here
             raise LoadError(LoadErrorReason.MISSING_PROJECT_CONF, str(e))
 
-        _yaml.composite(config, project_conf)
+        _yaml.composite(self._config_node, project_conf)
 
         # Assert project's format version early, before validating toplevel keys
-        format_version = _yaml.node_get(config, int, 'format-version')
+        format_version = _yaml.node_get(self._config_node, int, 'format-version')
         if BST_FORMAT_VERSION < format_version:
             major, minor = utils.get_bst_version()
             raise LoadError(
@@ -437,15 +438,15 @@ class Project():
 
         # The project name, element path and option declarations
         # are constant and cannot be overridden by option conditional statements
-        self.name = _yaml.node_get(config, str, 'name')
+        self.name = _yaml.node_get(self._config_node, str, 'name')
 
         # Validate that project name is a valid symbol name
-        _yaml.assert_symbol_name(_yaml.node_get_provenance(config, 'name'),
+        _yaml.assert_symbol_name(_yaml.node_get_provenance(self._config_node, 'name'),
                                  self.name, "project name")
 
         self.element_path = os.path.join(
             self.directory,
-            _yaml.node_get(config, str, 'element-path')
+            _yaml.node_get(self._config_node, str, 'element-path')
         )
 
         self.config.options = OptionPool(self.element_path)
@@ -455,18 +456,38 @@ class Project():
                              parent=parent_loader,
                              tempdir=tempdir)
 
-        project_includes = Includes(self.loader)
+        self._project_includes = Includes(self.loader)
 
-        config_no_include = _yaml.node_copy(config)
-        project_includes.ignore_includes(config_no_include)
+        config_no_include = _yaml.node_copy(self._config_node)
+        self._project_includes.ignore_includes(config_no_include)
 
         self._load_pass(config_no_include, self.first_pass_config, True)
 
-        project_includes.process(config)
+        # Use separate file for storing source references
+        self.ref_storage = _yaml.node_get(self._config_node, str, 'ref-storage')
+        if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
+            p = _yaml.node_get_provenance(self._config_node, 'ref-storage')
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: Invalid value '{}' specified for ref-storage"
+                            .format(p, self.ref_storage))
+
+        if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
+            self.junction_refs.load(self.first_pass_config.options)
+
+    def ensure_fully_loaded(self):
+        if self._fully_loaded:
+            return
+        assert self._partially_loaded
+        self._fully_loaded = True
+
+        if self.junction:
+            self.junction._get_project().ensure_fully_loaded()
+
+        self._project_includes.process(self._config_node)
 
-        self._load_pass(config, self.config, False)
+        self._load_pass(self._config_node, self.config, False)
 
-        _yaml.node_validate(config, self.INCLUDE_CONFIG_KEYS + self.MAIN_FILE_CONFIG_KEYS)
+        _yaml.node_validate(self._config_node, self.INCLUDE_CONFIG_KEYS + self.MAIN_FILE_CONFIG_KEYS)
 
         #
         # Now all YAML composition is done, from here on we just load
@@ -474,39 +495,30 @@ class Project():
         #
 
         # Load artifacts pull/push configuration for this project
-        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config)
+        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(self._config_node)
 
         # Source url aliases
-        self._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
+        self._aliases = _yaml.node_get(self._config_node, Mapping, 'aliases', default_value={})
 
         # Load sandbox environment variables
-        self.base_environment = _yaml.node_get(config, Mapping, 'environment')
-        self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
+        self.base_environment = _yaml.node_get(self._config_node, Mapping, 'environment')
+        self.base_env_nocache = _yaml.node_get(self._config_node, list, 'environment-nocache')
 
         # Load sandbox configuration
-        self._sandbox = _yaml.node_get(config, Mapping, 'sandbox')
+        self._sandbox = _yaml.node_get(self._config_node, Mapping, 'sandbox')
 
         # Load project split rules
-        self._splits = _yaml.node_get(config, Mapping, 'split-rules')
+        self._splits = _yaml.node_get(self._config_node, Mapping, 'split-rules')
 
         # Fail on overlap
-        self.fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap')
-
-        # Use separate file for storing source references
-        self.ref_storage = _yaml.node_get(config, str, 'ref-storage')
-        if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
-            p = _yaml.node_get_provenance(config, 'ref-storage')
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}: Invalid value '{}' specified for ref-storage"
-                            .format(p, self.ref_storage))
+        self.fail_on_overlap = _yaml.node_get(self._config_node, bool, 'fail-on-overlap')
 
         # Load project.refs if it exists, this may be ignored.
         if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
             self.refs.load(self.options)
-            self.junction_refs.load(self.options)
 
         # Parse shell options
-        shell_options = _yaml.node_get(config, Mapping, 'shell')
+        shell_options = _yaml.node_get(self._config_node, Mapping, 'shell')
         _yaml.node_validate(shell_options, ['command', 'environment', 'host-files'])
         self._shell_command = _yaml.node_get(shell_options, list, 'command')
 
@@ -535,6 +547,8 @@ class Project():
 
             self._shell_host_files.append(mount)
 
+        self._config_node = None
+
     # _load_pass():
     #
     # Loads parts of the project configuration that are different
diff --git a/buildstream/element.py b/buildstream/element.py
index aa49484..62e6554 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -197,9 +197,8 @@ class Element(Plugin):
 
         self.__is_junction = meta.kind == "junction"
 
-        if not project.is_loaded() and not self.__is_junction:
-            raise ElementError("{}: Cannot load element before project"
-                               .format(self), reason="project-not-loaded")
+        if not self.__is_junction:
+            project.ensure_fully_loaded()
 
         self.normal_name = os.path.splitext(self.name.replace(os.sep, '-'))[0]
         """A normalized element name
@@ -896,6 +895,7 @@ class Element(Plugin):
         if meta.first_pass:
             plugins = meta.project.first_pass_config.plugins
         else:
+            meta.project.ensure_fully_loaded()
             plugins = meta.project.plugins
 
         if meta in cls.__instantiated_elements:
@@ -2184,7 +2184,7 @@ class Element(Plugin):
             project_nocache = []
         else:
             project = self._get_project()
-            assert project.is_loaded()
+            project.ensure_fully_loaded()
             project_nocache = project.base_env_nocache
 
         default_nocache = _yaml.node_get(self.__defaults, list, 'environment-nocache', default_value=[])
@@ -2207,7 +2207,7 @@ class Element(Plugin):
         if self.__is_junction:
             variables = _yaml.node_chain_copy(project.first_pass_config.base_variables)
         else:
-            assert project.is_loaded()
+            project.ensure_fully_loaded()
             variables = _yaml.node_chain_copy(project.base_variables)
 
         _yaml.composite(variables, default_vars)
@@ -2238,7 +2238,7 @@ class Element(Plugin):
                               'build-gid': 0}
         else:
             project = self._get_project()
-            assert project.is_loaded()
+            project.ensure_fully_loaded()
             sandbox_config = _yaml.node_chain_copy(project._sandbox)
 
         # The default config is already composited with the project overrides
diff --git a/tests/artifactcache/config.py b/tests/artifactcache/config.py
index 079e511..f594747 100644
--- a/tests/artifactcache/config.py
+++ b/tests/artifactcache/config.py
@@ -98,6 +98,7 @@ def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user
     context = Context()
     context.load(config=user_config_file)
     project = Project(str(project_dir), context)
+    project.ensure_fully_loaded()
 
     # Use the helper from the artifactcache module to parse our configuration.
     parsed_cache_specs = _configured_remote_artifact_cache_specs(context, project)
diff --git a/tests/format/include/recursive/element.bst b/tests/format/include/recursive/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/recursive/element.bst
@@ -0,0 +1 @@
+kind: manual


[buildstream] 04/30: Extract plugin collection

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 7305ae9d6f34ccd5e42f642ed3371e85bb1af88b
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jun 11 17:55:53 2018 +0200

    Extract plugin collection
---
 buildstream/_frontend/widget.py |   4 +-
 buildstream/_project.py         | 259 +++++++++++++++++++++-------------------
 buildstream/element.py          |   7 +-
 3 files changed, 142 insertions(+), 128 deletions(-)

diff --git a/buildstream/_frontend/widget.py b/buildstream/_frontend/widget.py
index dab8cab..cad1a28 100644
--- a/buildstream/_frontend/widget.py
+++ b/buildstream/_frontend/widget.py
@@ -480,8 +480,8 @@ class LogLine(Widget):
             text += '\n'
 
         # Plugins
-        text += self._format_plugins(project._element_factory.loaded_dependencies,
-                                     project._source_factory.loaded_dependencies)
+        text += self._format_plugins(project.plugins._element_factory.loaded_dependencies,
+                                     project.plugins._source_factory.loaded_dependencies)
 
         # Pipeline state
         text += self.content_profile.fmt("Pipeline\n", bold=True)
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 36ae5b2..0668adc 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -65,6 +65,140 @@ class HostMount():
             self.host_path = self.path
 
 
+class PluginCollection:
+
+    def __init__(self, project, context, directory, config):
+        self._project = project
+        self._context = context
+        self._directory = directory
+        self._plugin_source_origins = []   # Origins of custom sources
+        self._plugin_element_origins = []  # Origins of custom elements
+
+        # Plugin origins and versions
+        origins = _yaml.node_get(config, list, 'plugins', default_value=[])
+        self._source_format_versions = {}
+        self._element_format_versions = {}
+        for origin in origins:
+            allowed_origin_fields = [
+                'origin', 'sources', 'elements',
+                'package-name', 'path',
+            ]
+            allowed_origins = ['core', 'local', 'pip']
+            _yaml.node_validate(origin, allowed_origin_fields)
+
+            if origin['origin'] not in allowed_origins:
+                raise LoadError(
+                    LoadErrorReason.INVALID_YAML,
+                    "Origin '{}' is not one of the allowed types"
+                    .format(origin['origin']))
+
+            # Store source versions for checking later
+            source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
+            for key, _ in _yaml.node_items(source_versions):
+                if key in self._source_format_versions:
+                    raise LoadError(
+                        LoadErrorReason.INVALID_YAML,
+                        "Duplicate listing of source '{}'".format(key))
+                self._source_format_versions[key] = _yaml.node_get(source_versions, int, key)
+
+            # Store element versions for checking later
+            element_versions = _yaml.node_get(origin, Mapping, 'elements', default_value={})
+            for key, _ in _yaml.node_items(element_versions):
+                if key in self._element_format_versions:
+                    raise LoadError(
+                        LoadErrorReason.INVALID_YAML,
+                        "Duplicate listing of element '{}'".format(key))
+                self._element_format_versions[key] = _yaml.node_get(element_versions, int, key)
+
+            # Store the origins if they're not 'core'.
+            # core elements are loaded by default, so storing is unnecessary.
+            if _yaml.node_get(origin, str, 'origin') != 'core':
+                self._store_origin(origin, 'sources', self._plugin_source_origins)
+                self._store_origin(origin, 'elements', self._plugin_element_origins)
+
+        pluginbase = PluginBase(package='buildstream.plugins')
+        self._element_factory = ElementFactory(pluginbase, self._plugin_element_origins)
+        self._source_factory = SourceFactory(pluginbase, self._plugin_source_origins)
+
+    # _store_origin()
+    #
+    # Helper function to store plugin origins
+    #
+    # Args:
+    #    origin (dict) - a dictionary indicating the origin of a group of
+    #                    plugins.
+    #    plugin_group (str) - The name of the type of plugin that is being
+    #                         loaded
+    #    destination (list) - A list of dicts to store the origins in
+    #
+    # Raises:
+    #    LoadError if 'origin' is an unexpected value
+    def _store_origin(self, origin, plugin_group, destination):
+        expected_groups = ['sources', 'elements']
+        if plugin_group not in expected_groups:
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "Unexpected plugin group: {}, expecting {}"
+                            .format(plugin_group, expected_groups))
+        if plugin_group in origin:
+            origin_dict = _yaml.node_copy(origin)
+            plugins = _yaml.node_get(origin, Mapping, plugin_group, default_value={})
+            origin_dict['plugins'] = [k for k, _ in _yaml.node_items(plugins)]
+            for group in expected_groups:
+                if group in origin_dict:
+                    del origin_dict[group]
+            if origin_dict['origin'] == 'local':
+                # paths are passed in relative to the project, but must be absolute
+                origin_dict['path'] = os.path.join(self._directory, origin_dict['path'])
+            destination.append(origin_dict)
+
+    # create_element()
+    #
+    # Instantiate and return an element
+    #
+    # Args:
+    #    artifacts (ArtifactCache): The artifact cache
+    #    meta (MetaElement): The loaded MetaElement
+    #
+    # Returns:
+    #    (Element): A newly created Element object of the appropriate kind
+    #
+    def create_element(self, artifacts, meta):
+        element = self._element_factory.create(self._context, self._project, artifacts, meta)
+        version = self._element_format_versions.get(meta.kind, 0)
+        self._assert_plugin_format(element, version)
+        return element
+
+    def get_element_type(self, kind):
+        return self._element_factory.lookup(kind)
+
+    # create_source()
+    #
+    # Instantiate and return a Source
+    #
+    # Args:
+    #    meta (MetaSource): The loaded MetaSource
+    #
+    # Returns:
+    #    (Source): A newly created Source object of the appropriate kind
+    #
+    def create_source(self, meta):
+        source = self._source_factory.create(self._context, self._project, meta)
+        version = self._source_format_versions.get(meta.kind, 0)
+        self._assert_plugin_format(source, version)
+        return source
+
+    # _assert_plugin_format()
+    #
+    # Helper to raise a PluginError if the loaded plugin is of a lesser version then
+    # the required version for this plugin
+    #
+    def _assert_plugin_format(self, plugin, version):
+        if plugin.BST_FORMAT_VERSION < version:
+            raise LoadError(LoadErrorReason.UNSUPPORTED_PLUGIN,
+                            "{}: Format version {} is too old for requested version {}"
+                            .format(plugin, plugin.BST_FORMAT_VERSION, version))
+
+
 # Project()
 #
 # The Project Configuration
@@ -87,6 +221,7 @@ class Project():
         self.refs = ProjectRefs(self.directory, 'project.refs')
         self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
 
+        self.plugins = None                      # PluginCollection
         self.options = None                      # OptionPool
         self.junction = junction                 # The junction Element object, if this is a subproject
         self.fail_on_overlap = False             # Whether overlaps are treated as errors
@@ -102,13 +237,9 @@ class Project():
         #
         self._context = context  # The invocation Context
         self._aliases = {}       # Aliases dictionary
-        self._plugin_source_origins = []   # Origins of custom sources
-        self._plugin_element_origins = []  # Origins of custom elements
 
         self._cli_options = cli_options
         self._cache_key = None
-        self._source_format_versions = {}
-        self._element_format_versions = {}
 
         self._shell_command = []      # The default interactive shell command
         self._shell_environment = {}  # Statically set environment vars
@@ -175,39 +306,6 @@ class Project():
 
         return self._cache_key
 
-    # create_element()
-    #
-    # Instantiate and return an element
-    #
-    # Args:
-    #    artifacts (ArtifactCache): The artifact cache
-    #    meta (MetaElement): The loaded MetaElement
-    #
-    # Returns:
-    #    (Element): A newly created Element object of the appropriate kind
-    #
-    def create_element(self, artifacts, meta):
-        element = self._element_factory.create(self._context, self, artifacts, meta)
-        version = self._element_format_versions.get(meta.kind, 0)
-        self._assert_plugin_format(element, version)
-        return element
-
-    # create_source()
-    #
-    # Instantiate and return a Source
-    #
-    # Args:
-    #    meta (MetaSource): The loaded MetaSource
-    #
-    # Returns:
-    #    (Source): A newly created Source object of the appropriate kind
-    #
-    def create_source(self, meta):
-        source = self._source_factory.create(self._context, self, meta)
-        version = self._source_format_versions.get(meta.kind, 0)
-        self._assert_plugin_format(source, version)
-        return source
-
     # _load():
     #
     # Loads the project configuration file in the project directory.
@@ -304,50 +402,7 @@ class Project():
         # Load artifacts pull/push configuration for this project
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config)
 
-        # Plugin origins and versions
-        origins = _yaml.node_get(config, list, 'plugins', default_value=[])
-        for origin in origins:
-            allowed_origin_fields = [
-                'origin', 'sources', 'elements',
-                'package-name', 'path',
-            ]
-            allowed_origins = ['core', 'local', 'pip']
-            _yaml.node_validate(origin, allowed_origin_fields)
-
-            if origin['origin'] not in allowed_origins:
-                raise LoadError(
-                    LoadErrorReason.INVALID_YAML,
-                    "Origin '{}' is not one of the allowed types"
-                    .format(origin['origin']))
-
-            # Store source versions for checking later
-            source_versions = _yaml.node_get(origin, Mapping, 'sources', default_value={})
-            for key, _ in _yaml.node_items(source_versions):
-                if key in self._source_format_versions:
-                    raise LoadError(
-                        LoadErrorReason.INVALID_YAML,
-                        "Duplicate listing of source '{}'".format(key))
-                self._source_format_versions[key] = _yaml.node_get(source_versions, int, key)
-
-            # Store element versions for checking later
-            element_versions = _yaml.node_get(origin, Mapping, 'elements', default_value={})
-            for key, _ in _yaml.node_items(element_versions):
-                if key in self._element_format_versions:
-                    raise LoadError(
-                        LoadErrorReason.INVALID_YAML,
-                        "Duplicate listing of element '{}'".format(key))
-                self._element_format_versions[key] = _yaml.node_get(element_versions, int, key)
-
-            # Store the origins if they're not 'core'.
-            # core elements are loaded by default, so storing is unnecessary.
-            if _yaml.node_get(origin, str, 'origin') != 'core':
-                self._store_origin(origin, 'sources', self._plugin_source_origins)
-                self._store_origin(origin, 'elements', self._plugin_element_origins)
-
-        pluginbase = PluginBase(package='buildstream.plugins')
-        self._element_factory = ElementFactory(pluginbase, self._plugin_element_origins)
-        self._source_factory = SourceFactory(pluginbase, self._plugin_source_origins)
-
+        self.plugins = PluginCollection(self, self._context, self.directory, config)
         # Source url aliases
         self._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
 
@@ -420,48 +475,6 @@ class Project():
 
             self._shell_host_files.append(mount)
 
-    # _assert_plugin_format()
-    #
-    # Helper to raise a PluginError if the loaded plugin is of a lesser version then
-    # the required version for this plugin
-    #
-    def _assert_plugin_format(self, plugin, version):
-        if plugin.BST_FORMAT_VERSION < version:
-            raise LoadError(LoadErrorReason.UNSUPPORTED_PLUGIN,
-                            "{}: Format version {} is too old for requested version {}"
-                            .format(plugin, plugin.BST_FORMAT_VERSION, version))
-
-    # _store_origin()
-    #
-    # Helper function to store plugin origins
-    #
-    # Args:
-    #    origin (dict) - a dictionary indicating the origin of a group of
-    #                    plugins.
-    #    plugin_group (str) - The name of the type of plugin that is being
-    #                         loaded
-    #    destination (list) - A list of dicts to store the origins in
-    #
-    # Raises:
-    #    LoadError if 'origin' is an unexpected value
-    def _store_origin(self, origin, plugin_group, destination):
-        expected_groups = ['sources', 'elements']
-        if plugin_group not in expected_groups:
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "Unexpected plugin group: {}, expecting {}"
-                            .format(plugin_group, expected_groups))
-        if plugin_group in origin:
-            origin_dict = _yaml.node_copy(origin)
-            plugins = _yaml.node_get(origin, Mapping, plugin_group, default_value={})
-            origin_dict['plugins'] = [k for k, _ in _yaml.node_items(plugins)]
-            for group in expected_groups:
-                if group in origin_dict:
-                    del origin_dict[group]
-            if origin_dict['origin'] == 'local':
-                # paths are passed in relative to the project, but must be absolute
-                origin_dict['path'] = os.path.join(self.directory, origin_dict['path'])
-            destination.append(origin_dict)
-
     # _ensure_project_dir()
     #
     # Returns path of the project directory, if a configuration file is found
diff --git a/buildstream/element.py b/buildstream/element.py
index fc21f80..f8a993a 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -887,16 +887,17 @@ class Element(Plugin):
     @classmethod
     def _new_from_meta(cls, meta, artifacts):
 
+        plugins = meta.project.plugins
+
         if meta in cls.__instantiated_elements:
             return cls.__instantiated_elements[meta]
 
-        project = meta.project
-        element = project.create_element(artifacts, meta)
+        element = plugins.create_element(artifacts, meta)
         cls.__instantiated_elements[meta] = element
 
         # Instantiate sources
         for meta_source in meta.sources:
-            source = project.create_source(meta_source)
+            source = plugins.create_source(meta_source)
             redundant_ref = source._load_ref()
             element.__sources.append(source)
 


[buildstream] 28/30: Inverse priority of in include composition

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit e3756d428adcc31b7554539f795b86cc6ab1fe1f
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Fri Jul 6 14:57:07 2018 +0200

    Inverse priority of in include composition
---
 buildstream/_includes.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 3640f7a..2779773 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -1,4 +1,5 @@
 import os
+import copy
 from collections import Mapping
 from . import _yaml
 from ._exceptions import LoadError, LoadErrorReason
@@ -39,8 +40,16 @@ class Includes:
                                  only_local=only_local)
                 finally:
                     included.remove(file_path)
-                _yaml.composite(node, include_node)
 
+                old_node = copy.copy(node)
+                while True:
+                    try:
+                        node.popitem()
+                    except KeyError:
+                        break
+                _yaml.composite(node, include_node)
+                _yaml.composite(node, old_node)
+                
         for _, value in _yaml.node_items(node):
             self._process_value(value, current_loader=current_loader,
                                 only_local=only_local)


[buildstream] 07/30: Rename BST_NO_PROJECT_DEFAULTS to (not) BST_PROJECT_INCLUDES_PROCESSED

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 420054b1bd4dd8d9bab075b67900c0834cddb355
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Fri Jun 22 10:29:23 2018 +0200

    Rename BST_NO_PROJECT_DEFAULTS to (not) BST_PROJECT_INCLUDES_PROCESSED
---
 buildstream/_loader/loader.py            |  4 +++-
 buildstream/element.py                   | 23 +++++++++++++----------
 buildstream/plugins/elements/junction.py |  2 +-
 3 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index c5bce62..9c83f1e 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -250,7 +250,9 @@ class Loader():
             kind_type, _ = self.project.first_pass_config.plugins.get_element_type(kind)
         except PluginError:
             kind_type = None
-        if kind_type and hasattr(kind_type, 'BST_NO_PROJECT_DEFAULTS') and kind_type.BST_NO_PROJECT_DEFAULTS:
+        if (kind_type and
+                hasattr(kind_type, 'BST_PROJECT_INCLUDES_PROCESSED') and
+                not kind_type.BST_PROJECT_INCLUDES_PROCESSED):
             self._first_pass_options.process_node(node)
         else:
             if not self.project.is_loaded():
diff --git a/buildstream/element.py b/buildstream/element.py
index ee523ff..ae604c5 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -191,8 +191,11 @@ class Element(Plugin):
     *Since: 1.2*
     """
 
-    BST_NO_PROJECT_DEFAULTS = False
-    """
+    BST_PROJECT_INCLUDES_PROCESSED = True
+    """Whether to load the plugin before processing include directives in
+    project.conf.
+
+    *Since: 1.2*
 
     """
 
@@ -200,7 +203,7 @@ class Element(Plugin):
 
         super().__init__(meta.name, context, project, meta.provenance, "element")
 
-        if not project.is_loaded() and not self.BST_NO_PROJECT_DEFAULTS:
+        if not project.is_loaded() and self.BST_PROJECT_INCLUDES_PROCESSED:
             raise ElementError("{}: Cannot load element before project"
                                .format(self), reason="project-not-loaded")
 
@@ -909,7 +912,7 @@ class Element(Plugin):
 
         # Instantiate sources
         for meta_source in meta.sources:
-            meta_source.first_pass = element.BST_NO_PROJECT_DEFAULTS
+            meta_source.first_pass = not element.BST_PROJECT_INCLUDES_PROCESSED
             source = plugins.create_source(meta_source)
             redundant_ref = source._load_ref()
             element.__sources.append(source)
@@ -2113,7 +2116,7 @@ class Element(Plugin):
         element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
         element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
 
-        if self.BST_NO_PROJECT_DEFAULTS:
+        if not self.BST_PROJECT_INCLUDES_PROCESSED:
             splits = _yaml.node_chain_copy(element_splits)
         elif project._splits is None:
             raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -2149,7 +2152,7 @@ class Element(Plugin):
             # Override the element's defaults with element specific
             # overrides from the project.conf
             project = self._get_project()
-            if self.BST_NO_PROJECT_DEFAULTS:
+            if not self.BST_PROJECT_INCLUDES_PROCESSED:
                 elements = project.first_pass_config.element_overrides
             else:
                 elements = project.element_overrides
@@ -2168,7 +2171,7 @@ class Element(Plugin):
     def __extract_environment(self, meta):
         default_env = _yaml.node_get(self.__defaults, Mapping, 'environment', default_value={})
 
-        if self.BST_NO_PROJECT_DEFAULTS:
+        if not self.BST_PROJECT_INCLUDES_PROCESSED:
             environment = {}
         else:
             project = self._get_project()
@@ -2186,7 +2189,7 @@ class Element(Plugin):
         return final_env
 
     def __extract_env_nocache(self, meta):
-        if self.BST_NO_PROJECT_DEFAULTS:
+        if not self.BST_PROJECT_INCLUDES_PROCESSED:
             project_nocache = []
         else:
             project = self._get_project()
@@ -2210,7 +2213,7 @@ class Element(Plugin):
         default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables', default_value={})
 
         project = self._get_project()
-        if self.BST_NO_PROJECT_DEFAULTS:
+        if not self.BST_PROJECT_INCLUDES_PROCESSED:
             variables = _yaml.node_chain_copy(project.first_pass_config.base_variables)
         else:
             assert project.is_loaded()
@@ -2239,7 +2242,7 @@ class Element(Plugin):
     # Sandbox-specific configuration data, to be passed to the sandbox's constructor.
     #
     def __extract_sandbox_config(self, meta):
-        if self.BST_NO_PROJECT_DEFAULTS:
+        if not self.BST_PROJECT_INCLUDES_PROCESSED:
             sandbox_config = {'build-uid': 0,
                               'build-gid': 0}
         else:
diff --git a/buildstream/plugins/elements/junction.py b/buildstream/plugins/elements/junction.py
index 2f81f46..dc6e385 100644
--- a/buildstream/plugins/elements/junction.py
+++ b/buildstream/plugins/elements/junction.py
@@ -136,7 +136,7 @@ class JunctionElement(Element):
     # Junctions are not allowed any dependencies
     BST_FORBID_BDEPENDS = True
     BST_FORBID_RDEPENDS = True
-    BST_NO_PROJECT_DEFAULTS = True
+    BST_PROJECT_INCLUDES_PROCESSED = False
 
     def configure(self, node):
         self.path = self.node_get_member(node, str, 'path', default='')


[buildstream] 22/30: Add missing files

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 5b644425e710793eae8cb951fae872c4613acf77
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Fri Jun 29 11:26:59 2018 +0200

    Add missing files
---
 tests/format/include/inner/element.bst    | 1 +
 tests/format/include/inner/extra_conf.yml | 7 +++++++
 tests/format/include/inner/project.conf   | 5 +++++
 3 files changed, 13 insertions(+)

diff --git a/tests/format/include/inner/element.bst b/tests/format/include/inner/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/inner/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/inner/extra_conf.yml b/tests/format/include/inner/extra_conf.yml
new file mode 100644
index 0000000..4c1847b
--- /dev/null
+++ b/tests/format/include/inner/extra_conf.yml
@@ -0,0 +1,7 @@
+build_arch:
+  type: arch
+  description: Architecture
+  variable: build_arch
+  values:
+    - i586
+    - x86_64
diff --git a/tests/format/include/inner/project.conf b/tests/format/include/inner/project.conf
new file mode 100644
index 0000000..8bdfc42
--- /dev/null
+++ b/tests/format/include/inner/project.conf
@@ -0,0 +1,5 @@
+name: test
+
+options:
+  (@):
+    - extra_conf.yml


[buildstream] 12/30: Forbid inline tracking on fragments coming from junctions

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 795df4e25aab328cc8830fa6093213f11a007314
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 19:06:21 2018 +0200

    Forbid inline tracking on fragments coming from junctions
---
 buildstream/_includes.py      | 10 +++++++---
 buildstream/_loader/loader.py |  2 +-
 buildstream/_yaml.py          | 21 +++++++++++++++------
 buildstream/source.py         | 23 ++++++++++++-----------
 tests/yaml/yaml.py            |  2 +-
 5 files changed, 36 insertions(+), 22 deletions(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 3837c5d..5b2c8aa 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -26,15 +26,19 @@ class Includes:
             self._process_value(value)
 
     def _include_file(self, include):
+        shortname = include
         if ':' in include:
             junction, include = include.split(':', 1)
             junction_loader = self._loader._get_loader(junction, fetch_subprojects=True)
-            directory = junction_loader.project.directory
+            project = junction_loader.project
         else:
-            directory = self._loader.project.directory
+            project = self._loader.project
+        directory = project.directory
         file_path = os.path.join(directory, include)
         if file_path not in self._loaded:
-            self._loaded[file_path] = _yaml.load(os.path.join(directory, include))
+            self._loaded[file_path] = _yaml.load(os.path.join(directory, include),
+                                                 shortname=shortname,
+                                                 project=project)
         return self._loaded[file_path]
 
     def _process_value(self, value):
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 9c83f1e..b6221a2 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -219,7 +219,7 @@ class Loader():
         # Load the data and process any conditional statements therein
         fullpath = os.path.join(self._basedir, filename)
         try:
-            node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable)
+            node = _yaml.load(fullpath, shortname=filename, copy_tree=rewritable, project=self.project)
         except LoadError as e:
             if e.reason == LoadErrorReason.MISSING_FILE:
                 # If we can't find the file, try to suggest plausible
diff --git a/buildstream/_yaml.py b/buildstream/_yaml.py
index 0e090e2..d975b1e 100644
--- a/buildstream/_yaml.py
+++ b/buildstream/_yaml.py
@@ -37,6 +37,13 @@ RoundTripConstructor.add_constructor(u'tag:yaml.org,2002:float', RoundTripConstr
 PROVENANCE_KEY = '__bst_provenance_info'
 
 
+class ProvenanceFile():
+    def __init__(self, name, shortname, project):
+        self.name = name
+        self.shortname = shortname
+        self.project = project
+
+
 # Provenance tracks the origin of a given node in the parsed dictionary.
 #
 # Args:
@@ -56,7 +63,7 @@ class Provenance():
 
     # Convert a Provenance to a string for error reporting
     def __str__(self):
-        return "{} [line {:d} column {:d}]".format(self.filename, self.line, self.col)
+        return "{} [line {:d} column {:d}]".format(self.filename.shortname, self.line, self.col)
 
     # Abstract method
     def clone(self):
@@ -174,13 +181,15 @@ class CompositeTypeError(CompositeError):
 #
 # Raises: LoadError
 #
-def load(filename, shortname=None, copy_tree=False):
+def load(filename, shortname=None, copy_tree=False, *, project=None):
     if not shortname:
         shortname = filename
 
+    file = ProvenanceFile(filename, shortname, project)
+
     try:
         with open(filename) as f:
-            return load_data(f, shortname=shortname, copy_tree=copy_tree)
+            return load_data(f, file, copy_tree=copy_tree)
     except FileNotFoundError as e:
         raise LoadError(LoadErrorReason.MISSING_FILE,
                         "Could not find file at {}".format(filename)) from e
@@ -192,7 +201,7 @@ def load(filename, shortname=None, copy_tree=False):
 
 # Like load(), but doesnt require the data to be in a file
 #
-def load_data(data, shortname=None, copy_tree=False):
+def load_data(data, file=None, copy_tree=False):
 
     try:
         contents = yaml.load(data, yaml.loader.RoundTripLoader, preserve_quotes=True)
@@ -207,9 +216,9 @@ def load_data(data, shortname=None, copy_tree=False):
         else:
             raise LoadError(LoadErrorReason.INVALID_YAML,
                             "YAML file has content of type '{}' instead of expected type 'dict': {}"
-                            .format(type(contents).__name__, shortname))
+                            .format(type(contents).__name__, file.name))
 
-    return node_decorated_copy(shortname, contents, copy_tree=copy_tree)
+    return node_decorated_copy(file, contents, copy_tree=copy_tree)
 
 
 # Dumps a previously loaded YAML node to a file
diff --git a/buildstream/source.py b/buildstream/source.py
index c019934..579e405 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -524,6 +524,7 @@ class Source(Plugin):
         toplevel = context.get_toplevel_project()
         toplevel_refs = self._project_refs(toplevel)
         provenance = self._get_provenance()
+        assert provenance.filename.project is not None
 
         element_name = self.__element_name
         element_idx = self.__element_index
@@ -558,24 +559,24 @@ class Source(Plugin):
         #
         # Step 3 - Apply the change in project data
         #
-        if project is toplevel:
-            if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
-                do_save_refs(toplevel_refs)
-            else:
+        if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
+            do_save_refs(toplevel_refs)
+        else:
+            if provenance.filename.project is toplevel:
                 # Save the ref in the originating file
                 #
-                fullname = os.path.join(toplevel.element_path, provenance.filename)
                 try:
-                    _yaml.dump(provenance.toplevel, fullname)
+                    _yaml.dump(_yaml.node_sanitize(provenance.toplevel), provenance.filename.name)
                 except OSError as e:
                     raise SourceError("{}: Error saving source reference to '{}': {}"
-                                      .format(self, provenance.filename, e),
+                                      .format(self, provenance.filename.name, e),
                                       reason="save-ref-error") from e
-        else:
-            if toplevel.ref_storage == ProjectRefStorage.PROJECT_REFS:
-                do_save_refs(toplevel_refs)
-            else:
+            elif provenance.filename.project is project:
                 self.warn("{}: Not persisting new reference in junctioned project".format(self))
+            else:
+                raise SourceError("{}: Cannot track source in a fragment from a junction"
+                                  .format(provenance.filename.shortname),
+                                  reason="tracking-junction-fragment")
 
         return changed
 
diff --git a/tests/yaml/yaml.py b/tests/yaml/yaml.py
index 3b9f385..7817637 100644
--- a/tests/yaml/yaml.py
+++ b/tests/yaml/yaml.py
@@ -33,7 +33,7 @@ def assert_provenance(filename, line, col, node, key=None, indices=[]):
     else:
         assert(isinstance(provenance, _yaml.DictProvenance))
 
-    assert(provenance.filename == filename)
+    assert(provenance.filename.shortname == filename)
     assert(provenance.line == line)
     assert(provenance.col == col)
 


[buildstream] 24/30: Fix indentation

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 1970bb16e5250d2197de9f7d2dfeee3cd54e7910
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jul 2 12:38:22 2018 +0200

    Fix indentation
---
 buildstream/_includes.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 0f68fc9..a357723 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -65,7 +65,7 @@ class Includes:
         if file_path not in self._loaded:
             self._loaded[key] = _yaml.load(os.path.join(directory, include),
                                            shortname=shortname,
-                                        project=project)
+                                           project=project)
         return self._loaded[key], file_path, current_loader
 
     def _process_value(self, value, *, current_loader=None):


[buildstream] 09/30: buildstream/_project.py: Document Project._load_pass.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit d97b2c1ee7ef4eab2fb1f11079fb22e779693f08
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 17:03:57 2018 +0200

    buildstream/_project.py: Document Project._load_pass.
---
 buildstream/_project.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/buildstream/_project.py b/buildstream/_project.py
index b4aa1ef..67eba0f 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -479,6 +479,16 @@ class Project():
 
             self._shell_host_files.append(mount)
 
+    # _load_pass():
+    #
+    # Loads parts of the project configuration that are different
+    # for first and second pass configurations.
+    #
+    # Args:
+    #    config (dict) - YaML node of the configuration file.
+    #    output (ProjectConfig) - ProjectConfig to load configuration onto.
+    #    ignore_unknown (bool) - Whether option loader shoud ignore unknown options.
+    #
     def _load_pass(self, config, output, ignore_unknown):
 
         # Element and Source  type configurations will be composited later onto


[buildstream] 02/30: Pass targets to Loader.load() instead of its constructor.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 0793cb41755216c9745212a144603e2a96a2bdeb
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jun 11 15:43:38 2018 +0200

    Pass targets to Loader.load() instead of its constructor.
    
    This is required to be able to build a Loader before the list
    of targets is known.
---
 buildstream/_loader/loader.py | 33 ++++++++++++++++-----------------
 buildstream/_pipeline.py      |  4 ++--
 tests/loader/__init__.py      |  4 ++--
 tests/loader/basics.py        | 23 ++++++++++++-----------
 tests/loader/dependencies.py  | 40 ++++++++++++++++++++--------------------
 5 files changed, 52 insertions(+), 52 deletions(-)

diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 07b0de9..11afef6 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -46,7 +46,6 @@ from . import MetaSource
 # Args:
 #    context (Context): The Context object
 #    project (Project): The toplevel Project object
-#    filenames (list of str): Target, element-path relative bst filenames in the project
 #    parent (Loader): A parent Loader object, in the case this is a junctioned Loader
 #    tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
 #                   loader in the case that this loader is a subproject loader.
@@ -54,22 +53,13 @@ from . import MetaSource
 #
 class Loader():
 
-    def __init__(self, context, project, filenames, *, parent=None, tempdir=None, fetch_subprojects=False):
+    def __init__(self, context, project, *, parent=None, tempdir=None, fetch_subprojects=False):
 
         # Ensure we have an absolute path for the base directory
         basedir = project.element_path
         if not os.path.isabs(basedir):
             basedir = os.path.abspath(basedir)
 
-        for filename in filenames:
-            if os.path.isabs(filename):
-                # XXX Should this just be an assertion ?
-                # Expect that the caller gives us the right thing at least ?
-                raise LoadError(LoadErrorReason.INVALID_DATA,
-                                "Target '{}' was not specified as a relative "
-                                "path to the base project directory: {}"
-                                .format(filename, basedir))
-
         #
         # Public members
         #
@@ -82,7 +72,6 @@ class Loader():
         self._context = context
         self._options = project.options      # Project options (OptionPool)
         self._basedir = basedir              # Base project directory
-        self._targets = filenames            # Target bst elements
         self._tempdir = tempdir              # A directory to cleanup
         self._parent = parent                # The parent loader
 
@@ -98,17 +87,27 @@ class Loader():
     #    rewritable (bool): Whether the loaded files should be rewritable
     #                       this is a bit more expensive due to deep copies
     #    ticker (callable): An optional function for tracking load progress
+    #    targets (list of str): Target, element-path relative bst filenames in the project
     #
     # Raises: LoadError
     #
     # Returns: The toplevel LoadElement
-    def load(self, rewritable=False, ticker=None):
+    def load(self, targets, rewritable=False, ticker=None):
+
+        for filename in targets:
+            if os.path.isabs(filename):
+                # XXX Should this just be an assertion ?
+                # Expect that the caller gives us the right thing at least ?
+                raise LoadError(LoadErrorReason.INVALID_DATA,
+                                "Target '{}' was not specified as a relative "
+                                "path to the base project directory: {}"
+                                .format(filename, self._basedir))
 
         # First pass, recursively load files and populate our table of LoadElements
         #
         deps = []
 
-        for target in self._targets:
+        for target in targets:
             profile_start(Topics.LOAD_PROJECT, target)
             junction, name, loader = self._parse_name(target, rewritable, ticker)
             loader._load_file(name, rewritable, ticker)
@@ -126,7 +125,7 @@ class Loader():
         dummy = DummyTarget(name='', full_name='', deps=deps)
         self._elements[''] = dummy
 
-        profile_key = "_".join(t for t in self._targets)
+        profile_key = "_".join(t for t in targets)
         profile_start(Topics.CIRCULAR_CHECK, profile_key)
         self._check_circular_deps('')
         profile_end(Topics.CIRCULAR_CHECK, profile_key)
@@ -135,7 +134,7 @@ class Loader():
         #
         # Sort direct dependencies of elements by their dependency ordering
         #
-        for target in self._targets:
+        for target in targets:
             profile_start(Topics.SORT_DEPENDENCIES, target)
             junction, name, loader = self._parse_name(target, rewritable, ticker)
             loader._sort_dependencies(name)
@@ -546,7 +545,7 @@ class Loader():
             else:
                 raise
 
-        loader = Loader(self._context, project, [],
+        loader = Loader(self._context, project,
                         parent=self,
                         tempdir=basedir,
                         fetch_subprojects=self._fetch_subprojects)
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 9f4504d..1474b37 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -106,11 +106,11 @@ class Pipeline():
 
         profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in targets))
 
-        self._loader = Loader(self._context, self._project, targets,
+        self._loader = Loader(self._context, self._project,
                               fetch_subprojects=fetch_subprojects)
 
         with self._context.timed_activity("Loading pipeline", silent_nested=True):
-            meta_elements = self._loader.load(rewritable, None)
+            meta_elements = self._loader.load(targets, rewritable, None)
 
         # Resolve the real elements now that we've loaded the project
         with self._context.timed_activity("Resolving pipeline"):
diff --git a/tests/loader/__init__.py b/tests/loader/__init__.py
index d64b776..49db9cf 100644
--- a/tests/loader/__init__.py
+++ b/tests/loader/__init__.py
@@ -8,7 +8,7 @@ from buildstream._loader import Loader
 # be removed in favor of testing the functionality via
 # the CLI like in the frontend tests anyway.
 #
-def make_loader(basedir, targets):
+def make_loader(basedir):
     context = Context()
     project = Project(basedir, context)
-    return Loader(context, project, targets)
+    return Loader(context, project)
diff --git a/tests/loader/basics.py b/tests/loader/basics.py
index 3526697..0252683 100644
--- a/tests/loader/basics.py
+++ b/tests/loader/basics.py
@@ -18,9 +18,9 @@ DATA_DIR = os.path.join(
 def test_one_file(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/onefile.bst'])
+    loader = make_loader(basedir)
 
-    element = loader.load()[0]
+    element = loader.load(['elements/onefile.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')
@@ -30,10 +30,10 @@ def test_one_file(datafiles):
 def test_missing_file(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/missing.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/missing.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.MISSING_FILE)
 
@@ -42,10 +42,10 @@ def test_missing_file(datafiles):
 def test_invalid_reference(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/badreference.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/badreference.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.INVALID_YAML)
 
@@ -54,10 +54,10 @@ def test_invalid_reference(datafiles):
 def test_invalid_yaml(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/badfile.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/badfile.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.INVALID_YAML)
 
@@ -69,7 +69,8 @@ def test_fail_fullpath_target(datafiles):
     fullpath = os.path.join(basedir, 'elements', 'onefile.bst')
 
     with pytest.raises(LoadError) as exc:
-        loader = make_loader(basedir, [fullpath])
+        loader = make_loader(basedir)
+        loader.load([fullpath])
 
     assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
 
@@ -78,10 +79,10 @@ def test_fail_fullpath_target(datafiles):
 def test_invalid_key(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/invalidkey.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/invalidkey.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
 
diff --git a/tests/loader/dependencies.py b/tests/loader/dependencies.py
index 0816e3c..4bb13a3 100644
--- a/tests/loader/dependencies.py
+++ b/tests/loader/dependencies.py
@@ -18,8 +18,8 @@ DATA_DIR = os.path.join(
 def test_two_files(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/target.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/target.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')
@@ -34,8 +34,8 @@ def test_two_files(datafiles):
 def test_shared_dependency(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/shareddeptarget.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/shareddeptarget.bst'])[0]
 
     # Toplevel is 'pony' with 2 dependencies
     #
@@ -77,8 +77,8 @@ def test_shared_dependency(datafiles):
 def test_dependency_dict(datafiles):
 
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/target-depdict.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/target-depdict.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')
@@ -92,10 +92,10 @@ def test_dependency_dict(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_invalid_dependency_declaration(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/invaliddep.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/invaliddep.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
 
@@ -103,10 +103,10 @@ def test_invalid_dependency_declaration(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_circular_dependency(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/circulartarget.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/circulartarget.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.CIRCULAR_DEPENDENCY)
 
@@ -114,10 +114,10 @@ def test_circular_dependency(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_invalid_dependency_type(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/invaliddeptype.bst'])
+    loader = make_loader(basedir)
 
     with pytest.raises(LoadError) as exc:
-        element = loader.load()[0]
+        element = loader.load(['elements/invaliddeptype.bst'])[0]
 
     assert (exc.value.reason == LoadErrorReason.INVALID_DATA)
 
@@ -125,8 +125,8 @@ def test_invalid_dependency_type(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_build_dependency(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/builddep.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/builddep.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')
@@ -141,8 +141,8 @@ def test_build_dependency(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_runtime_dependency(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/runtimedep.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/runtimedep.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')
@@ -157,8 +157,8 @@ def test_runtime_dependency(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_build_runtime_dependency(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/target.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/target.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')
@@ -174,8 +174,8 @@ def test_build_runtime_dependency(datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_all_dependency(datafiles):
     basedir = os.path.join(datafiles.dirname, datafiles.basename)
-    loader = make_loader(basedir, ['elements/alldep.bst'])
-    element = loader.load()[0]
+    loader = make_loader(basedir)
+    element = loader.load(['elements/alldep.bst'])[0]
 
     assert(isinstance(element, MetaElement))
     assert(element.kind == 'pony')


[buildstream] 05/30: Add support for include '(@)' in project.conf and .bst files

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 292766d8f3d36f60d152762ebe928a93ef64d40e
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 13 13:15:15 2018 +0200

    Add support for include '(@)' in project.conf and .bst files
    
    Fixes #331.
---
 buildstream/_includes.py                           |  48 +++++
 buildstream/_loader/loader.py                      |  26 ++-
 buildstream/_loader/metasource.py                  |   1 +
 buildstream/_options/optionpool.py                 |  12 +-
 buildstream/_project.py                            | 186 ++++++++++++-------
 buildstream/_stream.py                             |   1 +
 buildstream/element.py                             |  78 ++++++--
 buildstream/plugins/elements/junction.py           |   1 +
 buildstream/source.py                              |  10 +-
 tests/format/include.py                            | 200 +++++++++++++++++++++
 tests/format/include/defines_name/element.bst      |   1 +
 tests/format/include/defines_name/extra_conf.yml   |   1 +
 tests/format/include/defines_name/project.conf     |   4 +
 tests/format/include/file/element.bst              |   1 +
 tests/format/include/file/extra_conf.yml           |   2 +
 tests/format/include/file/project.conf             |   4 +
 .../include/file_with_subproject/element.bst       |   1 +
 .../include/file_with_subproject/extra_conf.yml    |   2 +
 .../include/file_with_subproject/project.bst       |   4 +
 .../include/file_with_subproject/project.conf      |   4 +
 .../file_with_subproject/subproject/project.conf   |   1 +
 tests/format/include/junction/element.bst          |   1 +
 tests/format/include/junction/project.conf         |   4 +
 .../include/junction/subproject/extra_conf.yml     |   2 +
 .../include/junction/subproject/project.conf       |   1 +
 tests/format/include/options/element.bst           |   1 +
 tests/format/include/options/extra_conf.yml        |   8 +
 tests/format/include/options/project.conf          |   4 +
 tests/format/include/overrides/element.bst         |   1 +
 tests/format/include/overrides/extra_conf.yml      |  16 ++
 tests/format/include/overrides/project.conf        |  20 +++
 .../include/overrides/subproject/project.conf      |   1 +
 tests/format/include/sub-include/element.bst       |   1 +
 tests/format/include/sub-include/manual_conf.yml   |   2 +
 tests/format/include/sub-include/project.conf      |   6 +
 35 files changed, 562 insertions(+), 94 deletions(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
new file mode 100644
index 0000000..718cd82
--- /dev/null
+++ b/buildstream/_includes.py
@@ -0,0 +1,48 @@
+import os
+from collections import Mapping
+from . import _yaml
+
+
+class Includes:
+
+    def __init__(self, loader, valid_keys=None):
+        self._loader = loader
+        self._valid_keys = valid_keys
+
+    def process(self, node):
+        while True:
+            includes = _yaml.node_get(node, list, '(@)', default_value=None)
+            if '(@)' in node:
+                del node['(@)']
+
+            if not includes:
+                break
+
+            for include in includes:
+                include_node = self._include_file(include)
+                if self._valid_keys:
+                    _yaml.node_validate(include_node, self._valid_keys)
+
+                _yaml.composite(node, include_node)
+
+        for _, value in _yaml.node_items(node):
+            self._process_value(value)
+
+    def _include_file(self, include):
+        if ':' in include:
+            junction, include = include.split(':', 1)
+            junction_loader = self._loader._get_loader(junction, fetch_subprojects=True)
+            directory = junction_loader.project.directory
+        else:
+            directory = self._loader.project.directory
+        return _yaml.load(os.path.join(directory, include))
+
+    def _process_value(self, value):
+        if isinstance(value, Mapping):
+            self.process(value)
+        elif isinstance(value, list):
+            self._process_list(value)
+
+    def _process_list(self, values):
+        for value in values:
+            self._process_value(value)
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index a190f17..c5bce62 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -23,12 +23,13 @@ from collections import Mapping, namedtuple
 import tempfile
 import shutil
 
-from .._exceptions import LoadError, LoadErrorReason
+from .._exceptions import LoadError, LoadErrorReason, PluginError
 from .. import Consistency
 from .. import _yaml
 from ..element import Element
 from .._profile import Topics, profile_start, profile_end
 from .._platform import Platform
+from .._includes import Includes
 
 from .types import Symbol, Dependency
 from .loadelement import LoadElement
@@ -69,6 +70,7 @@ class Loader():
         self._context = context
         self._options = project.options      # Project options (OptionPool)
         self._basedir = basedir              # Base project directory
+        self._first_pass_options = project.first_pass_config.options  # Project options (OptionPool)
         self._tempdir = tempdir              # A directory to cleanup
         self._parent = parent                # The parent loader
 
@@ -76,6 +78,8 @@ class Loader():
         self._elements = {}       # Dict of elements
         self._loaders = {}        # Dict of junction loaders
 
+        self._includes = Includes(self)
+
     # load():
     #
     # Loads the project based on the parameters given to the constructor
@@ -241,7 +245,22 @@ class Loader():
                                 message, detail=detail) from e
             else:
                 raise
-        self._options.process_node(node)
+        kind = _yaml.node_get(node, str, Symbol.KIND)
+        try:
+            kind_type, _ = self.project.first_pass_config.plugins.get_element_type(kind)
+        except PluginError:
+            kind_type = None
+        if kind_type and hasattr(kind_type, 'BST_NO_PROJECT_DEFAULTS') and kind_type.BST_NO_PROJECT_DEFAULTS:
+            self._first_pass_options.process_node(node)
+        else:
+            if not self.project.is_loaded():
+                raise LoadError(LoadErrorReason.INVALID_DATA,
+                                "{}: Cannot pre-load. Element depends on project defaults."
+                                .format(filename))
+
+            self._includes.process(node)
+
+            self._options.process_node(node)
 
         element = LoadElement(node, filename, self)
 
@@ -506,7 +525,8 @@ class Loader():
                             "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
 
         platform = Platform.get_platform()
-        element = Element._new_from_meta(meta_element, platform.artifactcache)
+        element = Element._new_from_meta(meta_element, platform.artifactcache,
+                                         first_pass=True)
         element._preflight()
 
         for source in element.sources():
diff --git a/buildstream/_loader/metasource.py b/buildstream/_loader/metasource.py
index 3bcc21e..4241ae5 100644
--- a/buildstream/_loader/metasource.py
+++ b/buildstream/_loader/metasource.py
@@ -38,3 +38,4 @@ class MetaSource():
         self.kind = kind
         self.config = config
         self.directory = directory
+        self.first_pass = False
diff --git a/buildstream/_options/optionpool.py b/buildstream/_options/optionpool.py
index f90fd82..83a202f 100644
--- a/buildstream/_options/optionpool.py
+++ b/buildstream/_options/optionpool.py
@@ -108,15 +108,17 @@ class OptionPool():
     # Args:
     #    cli_options (list): A list of (str, str) tuples
     #
-    def load_cli_values(self, cli_options):
+    def load_cli_values(self, cli_options, ignore_unknown=False):
         for option_name, option_value in cli_options:
             try:
                 option = self._options[option_name]
             except KeyError as e:
-                raise LoadError(LoadErrorReason.INVALID_DATA,
-                                "Unknown option '{}' specified on the command line"
-                                .format(option_name)) from e
-            option.set_value(option_value)
+                if not ignore_unknown:
+                    raise LoadError(LoadErrorReason.INVALID_DATA,
+                                    "Unknown option '{}' specified on the command line"
+                                    .format(option_name)) from e
+            else:
+                option.set_value(option_value)
 
     # resolve()
     #
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 0668adc..b4aa1ef 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -34,6 +34,7 @@ from ._sourcefactory import SourceFactory
 from ._projectrefs import ProjectRefs, ProjectRefStorage
 from ._versions import BST_FORMAT_VERSION
 from ._loader import Loader
+from ._includes import Includes
 
 
 # The separator we use for user specified aliases
@@ -199,12 +200,33 @@ class PluginCollection:
                             .format(plugin, plugin.BST_FORMAT_VERSION, version))
 
 
+class ProjectConfig:
+    def __init__(self):
+        self.plugins = None
+        self.options = None                      # OptionPool
+        self.base_variables = {}                 # The base set of variables
+        self.element_overrides = {}              # Element specific configurations
+        self.source_overrides = {}               # Source specific configurations
+
+
 # Project()
 #
 # The Project Configuration
 #
 class Project():
 
+    INCLUDE_CONFIG_KEYS = ['variables',
+                           'environment', 'environment-nocache',
+                           'split-rules', 'elements', 'plugins',
+                           'aliases', 'artifacts',
+                           'fail-on-overlap', 'shell',
+                           'ref-storage', 'sandbox',
+                           'options']
+
+    MAIN_FILE_CONFIG_KEYS = ['format-version',
+                             'element-path',
+                             'name']
+
     def __init__(self, directory, context, *, junction=None, cli_options=None,
                  parent_loader=None, tempdir=None):
 
@@ -221,16 +243,14 @@ class Project():
         self.refs = ProjectRefs(self.directory, 'project.refs')
         self.junction_refs = ProjectRefs(self.directory, 'junction.refs')
 
-        self.plugins = None                      # PluginCollection
-        self.options = None                      # OptionPool
+        self.config = ProjectConfig()
+        self.first_pass_config = ProjectConfig()
+
         self.junction = junction                 # The junction Element object, if this is a subproject
         self.fail_on_overlap = False             # Whether overlaps are treated as errors
         self.ref_storage = None                  # ProjectRefStorage setting
-        self.base_variables = {}                 # The base set of variables
         self.base_environment = {}               # The base set of environment variables
         self.base_env_nocache = None             # The base nocache mask (list) for the environment
-        self.element_overrides = {}              # Element specific configurations
-        self.source_overrides = {}               # Source specific configurations
 
         #
         # Private Members
@@ -245,15 +265,42 @@ class Project():
         self._shell_environment = {}  # Statically set environment vars
         self._shell_host_files = []   # A list of HostMount objects
 
+        self.artifact_cache_specs = None
+        self._sandbox = None
+        self._splits = None
+
+        self._context.add_project(self)
+
+        self._loaded = False
+
         profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
-        self._load()
+        self._load(parent_loader=parent_loader, tempdir=tempdir)
         profile_end(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
 
-        self._context.add_project(self)
+        self._loaded = True
 
-        self.loader = Loader(self._context, self,
-                             parent=parent_loader,
-                             tempdir=tempdir)
+    @property
+    def plugins(self):
+        return self.config.plugins
+
+    @property
+    def options(self):
+        return self.config.options
+
+    @property
+    def base_variables(self):
+        return self.config.base_variables
+
+    @property
+    def element_overrides(self):
+        return self.config.element_overrides
+
+    @property
+    def source_overrides(self):
+        return self.config.source_overrides
+
+    def is_loaded(self):
+        return self._loaded
 
     # translate_url():
     #
@@ -312,7 +359,7 @@ class Project():
     #
     # Raises: LoadError if there was a problem with the project.conf
     #
-    def _load(self):
+    def _load(self, parent_loader=None, tempdir=None):
 
         # Load builtin default
         projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
@@ -327,15 +374,6 @@ class Project():
 
         _yaml.composite(config, project_conf)
 
-        # Element and Source  type configurations will be composited later onto
-        # element/source types, so we delete it from here and run our final
-        # assertion after.
-        self.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
-        self.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
-        config.pop('elements', None)
-        config.pop('sources', None)
-        _yaml.node_final_assertions(config)
-
         # Assert project's format version early, before validating toplevel keys
         format_version = _yaml.node_get(config, int, 'format-version')
         if BST_FORMAT_VERSION < format_version:
@@ -345,17 +383,6 @@ class Project():
                 "Project requested format version {}, but BuildStream {}.{} only supports up until format version {}"
                 .format(format_version, major, minor, BST_FORMAT_VERSION))
 
-        _yaml.node_validate(config, [
-            'format-version',
-            'element-path', 'variables',
-            'environment', 'environment-nocache',
-            'split-rules', 'elements', 'plugins',
-            'aliases', 'name',
-            'artifacts', 'options',
-            'fail-on-overlap', 'shell',
-            'ref-storage', 'sandbox'
-        ])
-
         # The project name, element path and option declarations
         # are constant and cannot be overridden by option conditional statements
         self.name = _yaml.node_get(config, str, 'name')
@@ -369,30 +396,21 @@ class Project():
             _yaml.node_get(config, str, 'element-path')
         )
 
-        # Load project options
-        options_node = _yaml.node_get(config, Mapping, 'options', default_value={})
-        self.options = OptionPool(self.element_path)
-        self.options.load(options_node)
-        if self.junction:
-            # load before user configuration
-            self.options.load_yaml_values(self.junction.options, transform=self.junction._subst_string)
+        self.config.options = OptionPool(self.element_path)
+        self.first_pass_config.options = OptionPool(self.element_path)
 
-        # Collect option values specified in the user configuration
-        overrides = self._context.get_overrides(self.name)
-        override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
-        self.options.load_yaml_values(override_options)
-        if self._cli_options:
-            self.options.load_cli_values(self._cli_options)
+        self.loader = Loader(self._context, self,
+                             parent=parent_loader,
+                             tempdir=tempdir)
 
-        # We're done modifying options, now we can use them for substitutions
-        self.options.resolve()
+        self._load_pass(_yaml.node_copy(config), self.first_pass_config, True)
 
-        #
-        # Now resolve any conditionals in the remaining configuration,
-        # any conditionals specified for project option declarations,
-        # or conditionally specifying the project name; will be ignored.
-        #
-        self.options.process_node(config)
+        project_includes = Includes(self.loader, self.INCLUDE_CONFIG_KEYS + ['elements', 'sources'])
+        project_includes.process(config)
+
+        self._load_pass(config, self.config, False)
+
+        _yaml.node_validate(config, self.INCLUDE_CONFIG_KEYS + self.MAIN_FILE_CONFIG_KEYS)
 
         #
         # Now all YAML composition is done, from here on we just load
@@ -402,23 +420,9 @@ class Project():
         # Load artifacts pull/push configuration for this project
         self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config)
 
-        self.plugins = PluginCollection(self, self._context, self.directory, config)
         # Source url aliases
         self._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
 
-        # Load base variables
-        self.base_variables = _yaml.node_get(config, Mapping, 'variables')
-
-        # Add the project name as a default variable
-        self.base_variables['project-name'] = self.name
-
-        # Extend variables with automatic variables and option exports
-        # Initialize it as a string as all variables are processed as strings.
-        self.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
-
-        # Export options into variables, if that was requested
-        self.options.export_variables(self.base_variables)
-
         # Load sandbox environment variables
         self.base_environment = _yaml.node_get(config, Mapping, 'environment')
         self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
@@ -475,6 +479,56 @@ class Project():
 
             self._shell_host_files.append(mount)
 
+    def _load_pass(self, config, output, ignore_unknown):
+
+        # Element and Source type configurations will be composited later onto
+        # element/source types, so we delete them from here and run our final
+        # assertion after.
+        output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
+        output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
+        config.pop('elements', None)
+        config.pop('sources', None)
+        _yaml.node_final_assertions(config)
+
+        output.plugins = PluginCollection(self, self._context, self.directory, config)
+
+        # Load project options
+        options_node = _yaml.node_get(config, Mapping, 'options', default_value={})
+        output.options.load(options_node)
+        if self.junction:
+            # load before user configuration
+            output.options.load_yaml_values(self.junction.options, transform=self.junction._subst_string)
+
+        # Collect option values specified in the user configuration
+        overrides = self._context.get_overrides(self.name)
+        override_options = _yaml.node_get(overrides, Mapping, 'options', default_value={})
+        output.options.load_yaml_values(override_options)
+        if self._cli_options:
+            output.options.load_cli_values(self._cli_options, ignore_unknown=ignore_unknown)
+
+        # We're done modifying options, now we can use them for substitutions
+        output.options.resolve()
+
+        #
+        # Now resolve any conditionals in the remaining configuration,
+        # any conditionals specified for project option declarations,
+        # or conditionally specifying the project name; will be ignored.
+        #
+        output.options.process_node(config)
+
+        # Load base variables
+        output.base_variables = _yaml.node_get(config, Mapping, 'variables')
+
+        # Add the project name as a default variable
+        output.base_variables['project-name'] = self.name
+
+        # Extend variables with automatic variables and option exports
+        # Initialize it as a string as all variables are processed as strings.
+        output.base_variables['max-jobs'] = str(multiprocessing.cpu_count())
+
+        # Export options into variables, if that was requested
+        output.options.export_variables(output.base_variables)
+
     # _ensure_project_dir()
     #
     # Returns path of the project directory, if a configuration file is found
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 48d3571..4801ecc 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -75,6 +75,7 @@ class Stream():
         self._artifacts = self._platform.artifactcache
         self._context = context
         self._project = project
+
         self._pipeline = Pipeline(context, project, self._artifacts)
         self._scheduler = Scheduler(context, session_start,
                                     interrupt_callback=interrupt_callback,
diff --git a/buildstream/element.py b/buildstream/element.py
index f8a993a..ee523ff 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -191,10 +191,19 @@ class Element(Plugin):
     *Since: 1.2*
     """
 
+    BST_NO_PROJECT_DEFAULTS = False
+    """Whether instances of this element kind may be constructed before the
+    project is fully loaded, i.e. without project defaults (e.g. junctions).
+    """
+
     def __init__(self, context, project, artifacts, meta, plugin_conf):
 
         super().__init__(meta.name, context, project, meta.provenance, "element")
 
+        if not project.is_loaded() and not self.BST_NO_PROJECT_DEFAULTS:
+            raise ElementError("{}: Cannot load element before project"
+                               .format(self), reason="project-not-loaded")
+
         self.normal_name = os.path.splitext(self.name.replace(os.sep, '-'))[0]
         """A normalized element name
 
@@ -885,9 +894,12 @@ class Element(Plugin):
     #    (Element): A newly created Element instance
     #
     @classmethod
-    def _new_from_meta(cls, meta, artifacts):
+    def _new_from_meta(cls, meta, artifacts, first_pass=False):
 
-        plugins = meta.project.plugins
+        if first_pass:
+            plugins = meta.project.first_pass_config.plugins
+        else:
+            plugins = meta.project.plugins
 
         if meta in cls.__instantiated_elements:
             return cls.__instantiated_elements[meta]
@@ -897,6 +909,7 @@ class Element(Plugin):
 
         # Instantiate sources
         for meta_source in meta.sources:
+            meta_source.first_pass = element.BST_NO_PROJECT_DEFAULTS
             source = plugins.create_source(meta_source)
             redundant_ref = source._load_ref()
             element.__sources.append(source)
@@ -907,10 +920,10 @@ class Element(Plugin):
 
         # Instantiate dependencies
         for meta_dep in meta.dependencies:
-            dependency = Element._new_from_meta(meta_dep, artifacts)
+            dependency = Element._new_from_meta(meta_dep, artifacts, first_pass=first_pass)
             element.__runtime_dependencies.append(dependency)
         for meta_dep in meta.build_dependencies:
-            dependency = Element._new_from_meta(meta_dep, artifacts)
+            dependency = Element._new_from_meta(meta_dep, artifacts, first_pass=first_pass)
             element.__build_dependencies.append(dependency)
 
         return element
@@ -2095,16 +2108,24 @@ class Element(Plugin):
 
     def __compose_default_splits(self, defaults):
         project = self._get_project()
-        project_splits = _yaml.node_chain_copy(project._splits)
 
         element_public = _yaml.node_get(defaults, Mapping, 'public', default_value={})
         element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
         element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
 
-        # Extend project wide split rules with any split rules defined by the element
-        _yaml.composite(project_splits, element_splits)
+        if self.BST_NO_PROJECT_DEFAULTS:
+            splits = _yaml.node_chain_copy(element_splits)
+        elif project._splits is None:
+            raise LoadError(LoadErrorReason.INVALID_DATA,
+                            "{}: Project was not fully loaded while loading element. "
+                            "Only non-artifact elements (e.g. junctions) are allowed in this context."
+                            .format(self.name))
+        else:
+            splits = _yaml.node_chain_copy(project._splits)
+            # Extend project wide split rules with any split rules defined by the element
+            _yaml.composite(splits, element_splits)
 
-        element_bst['split-rules'] = project_splits
+        element_bst['split-rules'] = splits
         element_public['bst'] = element_bst
         defaults['public'] = element_public
 
@@ -2128,7 +2149,11 @@ class Element(Plugin):
             # Override the element's defaults with element specific
             # overrides from the project.conf
             project = self._get_project()
-            elements = project.element_overrides
+            if self.BST_NO_PROJECT_DEFAULTS:
+                elements = project.first_pass_config.element_overrides
+            else:
+                elements = project.element_overrides
+
             overrides = elements.get(self.get_kind())
             if overrides:
                 _yaml.composite(defaults, overrides)
@@ -2141,10 +2166,14 @@ class Element(Plugin):
     # creating sandboxes for this element
     #
     def __extract_environment(self, meta):
-        project = self._get_project()
         default_env = _yaml.node_get(self.__defaults, Mapping, 'environment', default_value={})
 
-        environment = _yaml.node_chain_copy(project.base_environment)
+        if self.BST_NO_PROJECT_DEFAULTS:
+            environment = {}
+        else:
+            project = self._get_project()
+            environment = _yaml.node_chain_copy(project.base_environment)
+
         _yaml.composite(environment, default_env)
         _yaml.composite(environment, meta.environment)
         _yaml.node_final_assertions(environment)
@@ -2157,8 +2186,13 @@ class Element(Plugin):
         return final_env
 
     def __extract_env_nocache(self, meta):
-        project = self._get_project()
-        project_nocache = project.base_env_nocache
+        if self.BST_NO_PROJECT_DEFAULTS:
+            project_nocache = []
+        else:
+            project = self._get_project()
+            assert project.is_loaded()
+            project_nocache = project.base_env_nocache
+
         default_nocache = _yaml.node_get(self.__defaults, list, 'environment-nocache', default_value=[])
         element_nocache = meta.env_nocache
 
@@ -2173,10 +2207,15 @@ class Element(Plugin):
     # substituting command strings to be run in the sandbox
     #
     def __extract_variables(self, meta):
-        project = self._get_project()
         default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables', default_value={})
 
-        variables = _yaml.node_chain_copy(project.base_variables)
+        project = self._get_project()
+        if self.BST_NO_PROJECT_DEFAULTS:
+            variables = _yaml.node_chain_copy(project.first_pass_config.base_variables)
+        else:
+            assert project.is_loaded()
+            variables = _yaml.node_chain_copy(project.base_variables)
+
         _yaml.composite(variables, default_vars)
         _yaml.composite(variables, meta.variables)
         _yaml.node_final_assertions(variables)
@@ -2200,13 +2239,18 @@ class Element(Plugin):
     # Sandbox-specific configuration data, to be passed to the sandbox's constructor.
     #
     def __extract_sandbox_config(self, meta):
-        project = self._get_project()
+        if self.BST_NO_PROJECT_DEFAULTS:
+            sandbox_config = {'build-uid': 0,
+                              'build-gid': 0}
+        else:
+            project = self._get_project()
+            assert project.is_loaded()
+            sandbox_config = _yaml.node_chain_copy(project._sandbox)
 
         # The default config is already composited with the project overrides
         sandbox_defaults = _yaml.node_get(self.__defaults, Mapping, 'sandbox', default_value={})
         sandbox_defaults = _yaml.node_chain_copy(sandbox_defaults)
 
-        sandbox_config = _yaml.node_chain_copy(project._sandbox)
         _yaml.composite(sandbox_config, sandbox_defaults)
         _yaml.composite(sandbox_config, meta.sandbox)
         _yaml.node_final_assertions(sandbox_config)
diff --git a/buildstream/plugins/elements/junction.py b/buildstream/plugins/elements/junction.py
index ee5ed24..2f81f46 100644
--- a/buildstream/plugins/elements/junction.py
+++ b/buildstream/plugins/elements/junction.py
@@ -136,6 +136,7 @@ class JunctionElement(Element):
     # Junctions are not allowed any dependencies
     BST_FORBID_BDEPENDS = True
     BST_FORBID_RDEPENDS = True
+    BST_NO_PROJECT_DEFAULTS = True
 
     def configure(self, node):
         self.path = self.node_get_member(node, str, 'path', default='')
diff --git a/buildstream/source.py b/buildstream/source.py
index ec38ae8..c019934 100644
--- a/buildstream/source.py
+++ b/buildstream/source.py
@@ -137,8 +137,9 @@ class Source(Plugin):
 
         # Collect the composited element configuration and
         # ask the element to configure itself.
-        self.__init_defaults()
+        self.__init_defaults(meta)
         self.__config = self.__extract_config(meta)
+
         self.configure(self.__config)
 
     COMMON_CONFIG_KEYS = ['kind', 'directory']
@@ -611,10 +612,13 @@ class Source(Plugin):
                               reason="ensure-stage-dir-fail") from e
         return directory
 
-    def __init_defaults(self):
+    def __init_defaults(self, meta):
         if not self.__defaults_set:
             project = self._get_project()
-            sources = project.source_overrides
+            if meta.first_pass:
+                sources = project.first_pass_config.source_overrides
+            else:
+                sources = project.source_overrides
             type(self).__defaults = sources.get(self.get_kind(), {})
             type(self).__defaults_set = True
 
diff --git a/tests/format/include.py b/tests/format/include.py
new file mode 100644
index 0000000..ca6eaab
--- /dev/null
+++ b/tests/format/include.py
@@ -0,0 +1,200 @@
+import os
+import pytest
+from buildstream import _yaml
+from buildstream._exceptions import ErrorDomain, LoadErrorReason
+from tests.testutils import cli, generate_junction, create_repo
+
+
+# Project directory
+DATA_DIR = os.path.join(
+    os.path.dirname(os.path.realpath(__file__)),
+    'include'
+)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_project_file(cli, datafiles):
+    project = os.path.join(str(datafiles), 'file')
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded['included'] == 'True'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_junction_file(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'junction')
+
+    generate_junction(tmpdir,
+                      os.path.join(project, 'subproject'),
+                      os.path.join(project, 'junction.bst'),
+                      store_ref=True)
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded['included'] == 'True'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_junction_options(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'options')
+
+    result = cli.run(project=project, args=[
+        '-o', 'build_arch', 'x86_64',
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded['build_arch'] == 'x86_64'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_project_defines_name(cli, datafiles):
+    project = os.path.join(str(datafiles), 'defines_name')
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
+    """
+    Junction elements never depend on fully include processed project.
+    """
+
+    project = os.path.join(str(datafiles), 'junction')
+
+    subproject_path = os.path.join(project, 'subproject')
+    junction_path = os.path.join(project, 'junction.bst')
+
+    repo = create_repo('git', str(tmpdir))
+
+    ref = repo.create(subproject_path)
+
+    element = {
+        'kind': 'junction',
+        'sources': [
+            repo.source_config(ref=ref)
+        ]
+    }
+    _yaml.dump(element, junction_path)
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'junction.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'included' not in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_junction_element_partial_project_file(cli, tmpdir, datafiles):
+    """
+    Junction elements never depend on fully include processed project.
+    """
+
+    project = os.path.join(str(datafiles), 'file_with_subproject')
+
+    subproject_path = os.path.join(project, 'subproject')
+    junction_path = os.path.join(project, 'junction.bst')
+
+    repo = create_repo('git', str(tmpdir))
+
+    ref = repo.create(subproject_path)
+
+    element = {
+        'kind': 'junction',
+        'sources': [
+            repo.source_config(ref=ref)
+        ]
+    }
+    _yaml.dump(element, junction_path)
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'junction.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'included' not in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_element_overrides(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'overrides')
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'manual_main_override' in loaded
+    assert 'manual_included_override' in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_element_overrides_composition(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'overrides')
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{config}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'build-commands' in loaded
+    assert loaded['build-commands'] == ['first', 'second']
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_include_element_overrides_sub_include(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'sub-include')
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'included' in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'overrides')
+
+    generate_junction(tmpdir,
+                      os.path.join(project, 'subproject'),
+                      os.path.join(project, 'junction.bst'),
+                      store_ref=True)
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'junction.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'main_override' in loaded
+    assert 'included_override' not in loaded
diff --git a/tests/format/include/defines_name/element.bst b/tests/format/include/defines_name/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/defines_name/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/defines_name/extra_conf.yml b/tests/format/include/defines_name/extra_conf.yml
new file mode 100644
index 0000000..84e8c6a
--- /dev/null
+++ b/tests/format/include/defines_name/extra_conf.yml
@@ -0,0 +1 @@
+name: othername
diff --git a/tests/format/include/defines_name/project.conf b/tests/format/include/defines_name/project.conf
new file mode 100644
index 0000000..a7791a4
--- /dev/null
+++ b/tests/format/include/defines_name/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - extra_conf.yml
diff --git a/tests/format/include/file/element.bst b/tests/format/include/file/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/file/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/file/extra_conf.yml b/tests/format/include/file/extra_conf.yml
new file mode 100644
index 0000000..404ecd6
--- /dev/null
+++ b/tests/format/include/file/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+  included: 'True'
diff --git a/tests/format/include/file/project.conf b/tests/format/include/file/project.conf
new file mode 100644
index 0000000..a7791a4
--- /dev/null
+++ b/tests/format/include/file/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - extra_conf.yml
diff --git a/tests/format/include/file_with_subproject/element.bst b/tests/format/include/file_with_subproject/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/file_with_subproject/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/file_with_subproject/extra_conf.yml b/tests/format/include/file_with_subproject/extra_conf.yml
new file mode 100644
index 0000000..404ecd6
--- /dev/null
+++ b/tests/format/include/file_with_subproject/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+  included: 'True'
diff --git a/tests/format/include/file_with_subproject/project.bst b/tests/format/include/file_with_subproject/project.bst
new file mode 100644
index 0000000..4836c5f
--- /dev/null
+++ b/tests/format/include/file_with_subproject/project.bst
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - junction.bst:extra_conf.yml
diff --git a/tests/format/include/file_with_subproject/project.conf b/tests/format/include/file_with_subproject/project.conf
new file mode 100644
index 0000000..a7791a4
--- /dev/null
+++ b/tests/format/include/file_with_subproject/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - extra_conf.yml
diff --git a/tests/format/include/file_with_subproject/subproject/project.conf b/tests/format/include/file_with_subproject/subproject/project.conf
new file mode 100644
index 0000000..7a66554
--- /dev/null
+++ b/tests/format/include/file_with_subproject/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/junction/element.bst b/tests/format/include/junction/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/junction/project.conf b/tests/format/include/junction/project.conf
new file mode 100644
index 0000000..4836c5f
--- /dev/null
+++ b/tests/format/include/junction/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - junction.bst:extra_conf.yml
diff --git a/tests/format/include/junction/subproject/extra_conf.yml b/tests/format/include/junction/subproject/extra_conf.yml
new file mode 100644
index 0000000..404ecd6
--- /dev/null
+++ b/tests/format/include/junction/subproject/extra_conf.yml
@@ -0,0 +1,2 @@
+variables:
+  included: 'True'
diff --git a/tests/format/include/junction/subproject/project.conf b/tests/format/include/junction/subproject/project.conf
new file mode 100644
index 0000000..7a66554
--- /dev/null
+++ b/tests/format/include/junction/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/options/element.bst b/tests/format/include/options/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/options/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/options/extra_conf.yml b/tests/format/include/options/extra_conf.yml
new file mode 100644
index 0000000..ad1401e
--- /dev/null
+++ b/tests/format/include/options/extra_conf.yml
@@ -0,0 +1,8 @@
+options:
+  build_arch:
+    type: arch
+    description: Architecture
+    variable: build_arch
+    values:
+      - i586
+      - x86_64
diff --git a/tests/format/include/options/project.conf b/tests/format/include/options/project.conf
new file mode 100644
index 0000000..a7791a4
--- /dev/null
+++ b/tests/format/include/options/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - extra_conf.yml
diff --git a/tests/format/include/overrides/element.bst b/tests/format/include/overrides/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/overrides/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/overrides/extra_conf.yml b/tests/format/include/overrides/extra_conf.yml
new file mode 100644
index 0000000..3cd3530
--- /dev/null
+++ b/tests/format/include/overrides/extra_conf.yml
@@ -0,0 +1,16 @@
+elements:
+  junction:
+    variables:
+      included_override: True
+  manual:
+    variables:
+      manual_included_override: True
+    config:
+      build-commands:
+        (>):
+          - "second"
+
+sources:
+  git:
+    variables:
+      from_included: True
diff --git a/tests/format/include/overrides/project.conf b/tests/format/include/overrides/project.conf
new file mode 100644
index 0000000..9285b9d
--- /dev/null
+++ b/tests/format/include/overrides/project.conf
@@ -0,0 +1,20 @@
+name: test
+
+elements:
+  junction:
+    variables:
+      main_override: True
+  manual:
+    variables:
+      manual_main_override: True
+    config:
+      build-commands:
+        - "first"
+
+sources:
+  git:
+    variables:
+      from_main: True
+
+(@):
+  - extra_conf.yml
diff --git a/tests/format/include/overrides/subproject/project.conf b/tests/format/include/overrides/subproject/project.conf
new file mode 100644
index 0000000..7a66554
--- /dev/null
+++ b/tests/format/include/overrides/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub
diff --git a/tests/format/include/sub-include/element.bst b/tests/format/include/sub-include/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/sub-include/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/sub-include/manual_conf.yml b/tests/format/include/sub-include/manual_conf.yml
new file mode 100644
index 0000000..9c2c0dd
--- /dev/null
+++ b/tests/format/include/sub-include/manual_conf.yml
@@ -0,0 +1,2 @@
+variables:
+  included: True
diff --git a/tests/format/include/sub-include/project.conf b/tests/format/include/sub-include/project.conf
new file mode 100644
index 0000000..7f7df84
--- /dev/null
+++ b/tests/format/include/sub-include/project.conf
@@ -0,0 +1,6 @@
+name: test
+
+elements:
+  manual:
+    (@):
+      - manual_conf.yml


[buildstream] 10/30: buildstream/_includes.py: Cache loaded fragments.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit c31e5a10796a26aabe83b819b255688e56682c0a
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 17:06:36 2018 +0200

    buildstream/_includes.py: Cache loaded fragments.
---
 buildstream/_includes.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 718cd82..eed73a0 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -8,6 +8,7 @@ class Includes:
     def __init__(self, loader, valid_keys=None):
         self._loader = loader
         self._valid_keys = valid_keys
+        self._loaded = {}
 
     def process(self, node):
         while True:
@@ -35,7 +36,10 @@ class Includes:
             directory = junction_loader.project.directory
         else:
             directory = self._loader.project.directory
-        return _yaml.load(os.path.join(directory, include))
+        file_path = os.path.join(directory, include)
+        if file_path not in self._loaded:
+            self._loaded[file_path] = _yaml.load(os.path.join(directory, include))
+        return self._loaded[file_path]
 
     def _process_value(self, value):
         if isinstance(value, Mapping):


[buildstream] 19/30: doc/source/formatintro.rst: Use references where possible in Include section.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 37b1a77c0fb0c575555956862eeb20caf94fe289
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Thu Jun 28 15:10:32 2018 +0200

    doc/source/formatintro.rst: Use references where possible in Include section.
---
 doc/source/format_intro.rst | 27 ++++++++++++++-------------
 1 file changed, 14 insertions(+), 13 deletions(-)

diff --git a/doc/source/format_intro.rst b/doc/source/format_intro.rst
index d0a934a..c7b5c14 100644
--- a/doc/source/format_intro.rst
+++ b/doc/source/format_intro.rst
@@ -295,25 +295,26 @@ free form and not validated.
 Indicates that content should be loaded from files.
 
 The include directive expects a list of strings. Those are file names
-relative to project directory. Or they can be prefixed with a junction
-name and a colon (':'). In that case, the remain of the string is a
-file name relative to the project of the junction.
+relative to project directory. Or they can be prefixed with a
+:mod:`junction <elements.junction>` name and a colon (':'). In that
+case, the remainder of the string is a file name relative to the project
+of the junction.
 
-The include directive can be used in ``project.conf`` or in a ``.bst``
-file.  It can also be used in a file included by another include
-directive.
+The include directive can be used in :ref:`project.conf <projectconf>`
+or in a :ref:`.bst <format_basics>` file.  It can also be used in a
+file included by another include directive.
 
 Included files are composed into the including file. The files should
-take care of composition using list directives.
-
-Some ``project.conf`` configuration is not overridable by includes:
-``name``, ``format-version`` and ``element-path``.
+take care of composition using :ref:`list directives
+<format_directives_list_prepend>`.
 
 Junction elements never use values from included files from
-``project.conf``.  Variables, element overrides and source overrides
-required by junctions should all be directly in the ``project.conf``.
+:ref:`project.conf <projectconf>`.  Variables, :ref:`element overrides
+<project_element_overrides>` and :ref:`source overrides
+<project_source_overrides>` required by junctions should all be
+directly in the :ref:`project.conf <projectconf>`.
 
-Junction elements cannot use the include directive.
+Junction elements cannot use the include directive.
 
 **Example:**
 


[buildstream] 21/30: Fix issue with including in sub-nodes.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit ddeb066ea7817b8ef6217b6b9029868c93680dc4
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Fri Jun 29 10:11:53 2018 +0200

    Fix issue with including in sub-nodes.
---
 buildstream/_includes.py | 10 ++++++++++
 buildstream/_project.py  |  8 ++++++--
 tests/format/include.py  | 14 ++++++++++++++
 3 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 2bd885a..fd99a6c 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -10,6 +10,16 @@ class Includes:
         self._loader = loader
         self._loaded = {}
 
+    def ignore_includes(self, node):
+        if isinstance(node, Mapping):
+            if '(@)' in node:
+                del node['(@)']
+            for _, value in _yaml.node_items(node):
+                self.ignore_includes(value)
+        elif isinstance(node, list):
+            for value in node:
+                self.ignore_includes(value)
+
     def process(self, node, *, included=set()):
         includes = _yaml.node_get(node, list, '(@)', default_value=None)
         if '(@)' in node:
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 7aa72b6..2b9d637 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -455,9 +455,13 @@ class Project():
                              parent=parent_loader,
                              tempdir=tempdir)
 
-        self._load_pass(_yaml.node_copy(config), self.first_pass_config, True)
-
         project_includes = Includes(self.loader)
+
+        config_no_include = _yaml.node_copy(config)
+        project_includes.ignore_includes(config_no_include)
+
+        self._load_pass(config_no_include, self.first_pass_config, True)
+
         project_includes.process(config)
 
         self._load_pass(config, self.config, False)
diff --git a/tests/format/include.py b/tests/format/include.py
index 8a79ed9..4b26c97 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -215,3 +215,17 @@ def test_recusive_include(cli, tmpdir, datafiles):
         '--format', '%{vars}',
         'element.bst'])
     result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_INCLUDE)
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_inner(cli, datafiles):
+    project = os.path.join(str(datafiles), 'inner')
+    result = cli.run(project=project, args=[
+        '-o', 'build_arch', 'x86_64',
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded['build_arch'] == 'x86_64'


[buildstream] 27/30: Always process local includes.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 52b6b40838ba590de53d181b449fd337730baae0
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jul 2 11:55:55 2018 +0200

    Always process local includes.
---
 buildstream/_includes.py                           | 35 +++++++++++-----------
 buildstream/_project.py                            |  3 +-
 tests/format/include.py                            |  6 ++--
 .../format/include/overrides-junction/element.bst  |  1 +
 .../format/include/overrides-junction/project.conf | 20 +++++++++++++
 .../overrides-junction/subproject/extra_conf.yml   | 16 ++++++++++
 .../overrides-junction/subproject/project.conf     |  1 +
 7 files changed, 59 insertions(+), 23 deletions(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index a357723..3640f7a 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -10,19 +10,10 @@ class Includes:
         self._loader = loader
         self._loaded = {}
 
-    def ignore_includes(self, node):
-        if isinstance(node, Mapping):
-            if '(@)' in node:
-                del node['(@)']
-            for _, value in _yaml.node_items(node):
-                self.ignore_includes(value)
-        elif isinstance(node, list):
-            for value in node:
-                self.ignore_includes(value)
-
     def process(self, node, *,
                 included=set(),
-                current_loader=None):
+                current_loader=None,
+                only_local=False):
         if current_loader is None:
             current_loader = self._loader
 
@@ -32,6 +23,8 @@ class Includes:
 
         if includes:
             for include in includes:
+                if only_local and ':' in include:
+                    continue
                 include_node, file_path, sub_loader = self._include_file(include,
                                                                          current_loader)
                 if file_path in included:
@@ -42,13 +35,15 @@ class Includes:
                 try:
                     included.add(file_path)
                     self.process(include_node, included=included,
-                                 current_loader=sub_loader)
+                                 current_loader=sub_loader,
+                                 only_local=only_local)
                 finally:
                     included.remove(file_path)
                 _yaml.composite(node, include_node)
 
         for _, value in _yaml.node_items(node):
-            self._process_value(value, current_loader=current_loader)
+            self._process_value(value, current_loader=current_loader,
+                                only_local=only_local)
 
     def _include_file(self, include, loader):
         shortname = include
@@ -68,12 +63,16 @@ class Includes:
                                            project=project)
         return self._loaded[key], file_path, current_loader
 
-    def _process_value(self, value, *, current_loader=None):
+    def _process_value(self, value, *,
+                       current_loader=None,
+                       only_local=False):
         if isinstance(value, Mapping):
-            self.process(value, current_loader=current_loader)
+            self.process(value, current_loader=current_loader, only_local=only_local)
         elif isinstance(value, list):
-            self._process_list(value, current_loader=current_loader)
+            self._process_list(value, current_loader=current_loader, only_local=only_local)
 
-    def _process_list(self, values, *, current_loader=None):
+    def _process_list(self, values, *,
+                      current_loader=None,
+                      only_local=False):
         for value in values:
-            self._process_value(value, current_loader=current_loader)
+            self._process_value(value, current_loader=current_loader, only_local=only_local)
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 5156539..5e89f88 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -301,7 +301,6 @@ class Project():
     def source_overrides(self):
         return self.config.source_overrides
 
-
     # translate_url():
     #
     # Translates the given url which may be specified with an alias
@@ -459,7 +458,7 @@ class Project():
         self._project_includes = Includes(self.loader)
 
         config_no_include = _yaml.node_copy(self._config_node)
-        self._project_includes.ignore_includes(config_no_include)
+        self._project_includes.process(config_no_include, only_local=True)
 
         self._load_pass(config_no_include, self.first_pass_config, True)
 
diff --git a/tests/format/include.py b/tests/format/include.py
index ba8d4a0..938b987 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -93,7 +93,7 @@ def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
 
 
 @pytest.mark.datafiles(DATA_DIR)
-def test_junction_element_partial_project_file(cli, tmpdir, datafiles):
+def test_junction_element_not_partial_project_file(cli, tmpdir, datafiles):
     """
     Junction elements never depend on fully include processed project.
     """
@@ -122,7 +122,7 @@ def test_junction_element_partial_project_file(cli, tmpdir, datafiles):
         'junction.bst'])
     result.assert_success()
     loaded = _yaml.load_data(result.output)
-    assert 'included' not in loaded
+    assert 'included' in loaded
 
 
 @pytest.mark.datafiles(DATA_DIR)
@@ -171,7 +171,7 @@ def test_include_element_overrides_sub_include(cli, tmpdir, datafiles):
 
 @pytest.mark.datafiles(DATA_DIR)
 def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
-    project = os.path.join(str(datafiles), 'overrides')
+    project = os.path.join(str(datafiles), 'overrides-junction')
 
     generate_junction(tmpdir,
                       os.path.join(project, 'subproject'),
diff --git a/tests/format/include/overrides-junction/element.bst b/tests/format/include/overrides-junction/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/overrides-junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/overrides-junction/project.conf b/tests/format/include/overrides-junction/project.conf
new file mode 100644
index 0000000..d03bec6
--- /dev/null
+++ b/tests/format/include/overrides-junction/project.conf
@@ -0,0 +1,20 @@
+name: test
+
+elements:
+  junction:
+    variables:
+      main_override: True
+  manual:
+    variables:
+      manual_main_override: True
+    config:
+      build-commands:
+        - "first"
+
+sources:
+  git:
+    variables:
+      from_main: True
+
+(@):
+  - junction.bst:extra_conf.yml
diff --git a/tests/format/include/overrides-junction/subproject/extra_conf.yml b/tests/format/include/overrides-junction/subproject/extra_conf.yml
new file mode 100644
index 0000000..3cd3530
--- /dev/null
+++ b/tests/format/include/overrides-junction/subproject/extra_conf.yml
@@ -0,0 +1,16 @@
+elements:
+  junction:
+    variables:
+      included_override: True
+  manual:
+    variables:
+      manual_included_override: True
+    config:
+      build-commands:
+        (>):
+          - "second"
+
+sources:
+  git:
+    variables:
+      from_included: True
diff --git a/tests/format/include/overrides-junction/subproject/project.conf b/tests/format/include/overrides-junction/subproject/project.conf
new file mode 100644
index 0000000..7a66554
--- /dev/null
+++ b/tests/format/include/overrides-junction/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub


[buildstream] 25/30: Fix 'first pass config' behavior for loading elements

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 356a6347db62a2c560b4aa19b6d8ff8317542aca
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jul 2 11:52:39 2018 +0200

    Fix 'first pass config' behavior for loading elements
---
 buildstream/_loader/loader.py      | 6 +++---
 buildstream/_loader/metaelement.py | 4 +++-
 buildstream/element.py             | 8 ++++----
 3 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 3637f39..bc43180 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -448,7 +448,8 @@ class Loader():
                                    _yaml.node_get(node, Mapping, Symbol.ENVIRONMENT, default_value={}),
                                    _yaml.node_get(node, list, Symbol.ENV_NOCACHE, default_value=[]),
                                    _yaml.node_get(node, Mapping, Symbol.PUBLIC, default_value={}),
-                                   _yaml.node_get(node, Mapping, Symbol.SANDBOX, default_value={}))
+                                   _yaml.node_get(node, Mapping, Symbol.SANDBOX, default_value={}),
+                                   element_kind == 'junction')
 
         # Cache it now, make sure it's already there before recursing
         self._meta_elements[element_name] = meta_element
@@ -521,8 +522,7 @@ class Loader():
                             "{}: Expected junction but element kind is {}".format(filename, meta_element.kind))
 
         platform = Platform.get_platform()
-        element = Element._new_from_meta(meta_element, platform.artifactcache,
-                                         first_pass=True)
+        element = Element._new_from_meta(meta_element, platform.artifactcache)
         element._preflight()
 
         for source in element.sources():
diff --git a/buildstream/_loader/metaelement.py b/buildstream/_loader/metaelement.py
index 16788e9..b846546 100644
--- a/buildstream/_loader/metaelement.py
+++ b/buildstream/_loader/metaelement.py
@@ -38,7 +38,8 @@ class MetaElement():
     #    sandbox: Configuration specific to the sandbox environment
     #
     def __init__(self, project, name, kind, provenance, sources, config,
-                 variables, environment, env_nocache, public, sandbox):
+                 variables, environment, env_nocache, public, sandbox,
+                 first_pass):
         self.project = project
         self.name = name
         self.kind = kind
@@ -52,3 +53,4 @@ class MetaElement():
         self.sandbox = sandbox
         self.build_dependencies = []
         self.dependencies = []
+        self.first_pass = first_pass
diff --git a/buildstream/element.py b/buildstream/element.py
index 7365894..aa49484 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -891,9 +891,9 @@ class Element(Plugin):
     #    (Element): A newly created Element instance
     #
     @classmethod
-    def _new_from_meta(cls, meta, artifacts, first_pass=False):
+    def _new_from_meta(cls, meta, artifacts):
 
-        if first_pass:
+        if meta.first_pass:
             plugins = meta.project.first_pass_config.plugins
         else:
             plugins = meta.project.plugins
@@ -917,10 +917,10 @@ class Element(Plugin):
 
         # Instantiate dependencies
         for meta_dep in meta.dependencies:
-            dependency = Element._new_from_meta(meta_dep, artifacts, first_pass=first_pass)
+            dependency = Element._new_from_meta(meta_dep, artifacts)
             element.__runtime_dependencies.append(dependency)
         for meta_dep in meta.build_dependencies:
-            dependency = Element._new_from_meta(meta_dep, artifacts, first_pass=first_pass)
+            dependency = Element._new_from_meta(meta_dep, artifacts)
             element.__build_dependencies.append(dependency)
 
         return element


[buildstream] 30/30: Inject environment to bwrap through its command line

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 6716a56c4092e7b0162ff6f08f6d9c35e2b05a6f
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Sat Jul 7 15:57:01 2018 +0200

    Inject environment to bwrap through its command line
---
 buildstream/sandbox/_sandboxbwrap.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/buildstream/sandbox/_sandboxbwrap.py b/buildstream/sandbox/_sandboxbwrap.py
index 3a0645a..a068569 100644
--- a/buildstream/sandbox/_sandboxbwrap.py
+++ b/buildstream/sandbox/_sandboxbwrap.py
@@ -152,6 +152,9 @@ class SandboxBwrap(Sandbox):
                 gid = self._get_config().build_gid
                 bwrap_command += ['--uid', str(uid), '--gid', str(gid)]
 
+        for k, v in env.items():
+            bwrap_command += ['--setenv', k, v]
+
         # Add the command
         bwrap_command += command
 


[buildstream] 01/30: Move tests.frontend.generate_junction to test.testutils

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 2eaff21f162e4b779e57132a477f582e424445d3
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jun 11 15:33:11 2018 +0200

    Move tests.frontend.generate_junction to test.testutils
---
 tests/frontend/__init__.py                         | 34 ----------------------
 tests/frontend/buildcheckout.py                    |  4 +--
 tests/frontend/fetch.py                            |  4 +--
 tests/frontend/pull.py                             |  3 +-
 tests/frontend/push.py                             |  4 +--
 tests/frontend/show.py                             |  4 +--
 tests/frontend/track.py                            |  4 +--
 tests/frontend/track_cross_junction.py             |  4 +--
 tests/testutils/__init__.py                        |  1 +
 .../__init__.py => testutils/junction.py}          |  8 -----
 10 files changed, 13 insertions(+), 57 deletions(-)

diff --git a/tests/frontend/__init__.py b/tests/frontend/__init__.py
index 2eadf15..8cf7625 100644
--- a/tests/frontend/__init__.py
+++ b/tests/frontend/__init__.py
@@ -1,5 +1,4 @@
 import os
-from tests.testutils import create_repo
 from buildstream import _yaml
 
 
@@ -9,36 +8,3 @@ def configure_project(path, config):
     config['name'] = 'test'
     config['element-path'] = 'elements'
     _yaml.dump(config, os.path.join(path, 'project.conf'))
-
-
-# generate_junction()
-#
-# Generates a junction element with a git repository
-#
-# Args:
-#    tmpdir: The tmpdir fixture, for storing the generated git repo
-#    subproject_path: The path for the subproject, to add to the git repo
-#    junction_path: The location to store the generated junction element
-#    store_ref: Whether to store the ref in the junction.bst file
-#
-# Returns:
-#    (str): The ref
-#
-def generate_junction(tmpdir, subproject_path, junction_path, *, store_ref=True):
-    # Create a repo to hold the subproject and generate
-    # a junction element for it
-    #
-    repo = create_repo('git', str(tmpdir))
-    source_ref = ref = repo.create(subproject_path)
-    if not store_ref:
-        source_ref = None
-
-    element = {
-        'kind': 'junction',
-        'sources': [
-            repo.source_config(ref=source_ref)
-        ]
-    }
-    _yaml.dump(element, junction_path)
-
-    return ref
diff --git a/tests/frontend/buildcheckout.py b/tests/frontend/buildcheckout.py
index 5b46d3d..a65365e 100644
--- a/tests/frontend/buildcheckout.py
+++ b/tests/frontend/buildcheckout.py
@@ -1,11 +1,11 @@
 import os
 import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
 
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
-from . import configure_project, generate_junction
+from . import configure_project
 
 # Project directory
 DATA_DIR = os.path.join(
diff --git a/tests/frontend/fetch.py b/tests/frontend/fetch.py
index ee3a3c3..e896f4a 100644
--- a/tests/frontend/fetch.py
+++ b/tests/frontend/fetch.py
@@ -1,11 +1,11 @@
 import os
 import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
 
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
-from . import configure_project, generate_junction
+from . import configure_project
 
 # Project directory
 TOP_DIR = os.path.dirname(os.path.realpath(__file__))
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index c43cc83..d7b5a60 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -1,10 +1,9 @@
 import os
 import shutil
 import pytest
-from tests.testutils import cli, create_artifact_share
+from tests.testutils import cli, create_artifact_share, generate_junction
 from tests.testutils.site import IS_LINUX
 
-from . import generate_junction
 
 # Project directory
 DATA_DIR = os.path.join(
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index ca46b04..f76a6c4 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -5,9 +5,9 @@ from collections import namedtuple
 from unittest.mock import MagicMock
 
 from buildstream._exceptions import ErrorDomain
-from tests.testutils import cli, create_artifact_share, create_element_size
+from tests.testutils import cli, create_artifact_share, create_element_size, generate_junction
 from tests.testutils.site import IS_LINUX
-from . import configure_project, generate_junction
+from . import configure_project
 
 
 # Project directory
diff --git a/tests/frontend/show.py b/tests/frontend/show.py
index 0276961..b215694 100644
--- a/tests/frontend/show.py
+++ b/tests/frontend/show.py
@@ -1,12 +1,12 @@
 import os
 import pytest
 import itertools
-from tests.testutils import cli
+from tests.testutils import cli, generate_junction
 
 from buildstream import _yaml
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
 
-from . import configure_project, generate_junction
+from . import configure_project
 
 # Project directory
 DATA_DIR = os.path.join(
diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 51768d6..4e10598 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -1,11 +1,11 @@
 import os
 import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
 
 from buildstream._exceptions import ErrorDomain, LoadErrorReason
 from buildstream import _yaml
 
-from . import configure_project, generate_junction
+from . import configure_project
 
 # Project directory
 TOP_DIR = os.path.dirname(os.path.realpath(__file__))
diff --git a/tests/frontend/track_cross_junction.py b/tests/frontend/track_cross_junction.py
index 34c39dd..423edbd 100644
--- a/tests/frontend/track_cross_junction.py
+++ b/tests/frontend/track_cross_junction.py
@@ -1,10 +1,8 @@
 import os
 import pytest
-from tests.testutils import cli, create_repo, ALL_REPO_KINDS
+from tests.testutils import cli, create_repo, ALL_REPO_KINDS, generate_junction
 from buildstream import _yaml
 
-from . import generate_junction
-
 
 def generate_element(repo, element_path, dep_name=None):
     element = {
diff --git a/tests/testutils/__init__.py b/tests/testutils/__init__.py
index 93143b5..e9db949 100644
--- a/tests/testutils/__init__.py
+++ b/tests/testutils/__init__.py
@@ -2,3 +2,4 @@ from .runcli import cli, cli_integration
 from .repo import create_repo, ALL_REPO_KINDS
 from .artifactshare import create_artifact_share
 from .element_generators import create_element_size
+from .junction import generate_junction
diff --git a/tests/frontend/__init__.py b/tests/testutils/junction.py
similarity index 81%
copy from tests/frontend/__init__.py
copy to tests/testutils/junction.py
index 2eadf15..efc429e 100644
--- a/tests/frontend/__init__.py
+++ b/tests/testutils/junction.py
@@ -3,14 +3,6 @@ from tests.testutils import create_repo
 from buildstream import _yaml
 
 
-# Shared function to configure the project.conf inline
-#
-def configure_project(path, config):
-    config['name'] = 'test'
-    config['element-path'] = 'elements'
-    _yaml.dump(config, os.path.join(path, 'project.conf'))
-
-
 # generate_junction()
 #
 # Generates a junction element with a git repository


[buildstream] 03/30: Make Project owner of Loader.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit ea7bb36b997211f7d58dab8066130fd6e90499a4
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Mon Jun 11 15:57:28 2018 +0200

    Make Project owner of Loader.
---
 buildstream/_frontend/app.py      | 30 +++++++++++++---------
 buildstream/_loader/loader.py     | 54 +++++++++++++++++++++------------------
 buildstream/_pipeline.py          | 11 +++-----
 buildstream/_platform/linux.py    |  4 +--
 buildstream/_platform/platform.py |  3 +--
 buildstream/_platform/unix.py     |  4 +--
 buildstream/_project.py           |  8 +++++-
 buildstream/_stream.py            |  4 +--
 tests/loader/__init__.py          |  2 +-
 9 files changed, 65 insertions(+), 55 deletions(-)

diff --git a/buildstream/_frontend/app.py b/buildstream/_frontend/app.py
index 4675b0e..217225d 100644
--- a/buildstream/_frontend/app.py
+++ b/buildstream/_frontend/app.py
@@ -34,6 +34,7 @@ from .. import Scope
 
 # Import various buildstream internals
 from .._context import Context
+from .._platform import Platform
 from .._project import Project
 from .._exceptions import BstError, StreamError, LoadError, LoadErrorReason, AppError
 from .._message import Message, MessageType, unconditional_messages
@@ -66,6 +67,7 @@ class App():
         self.context = None        # The Context object
         self.stream = None         # The Stream object
         self.project = None        # The toplevel Project object
+        self.loader = None
         self.logger = None         # The LogLine object
         self.interactive = None    # Whether we are running in interactive mode
         self.colors = None         # Whether to use colors in logging
@@ -198,11 +200,26 @@ class App():
             if option_value is not None:
                 setattr(self.context, context_attr, option_value)
 
+        Platform.create_instance(self.context)
+
+        # Create the logger right before setting the message handler
+        self.logger = LogLine(self.context,
+                              self._content_profile,
+                              self._format_profile,
+                              self._success_profile,
+                              self._error_profile,
+                              self._detail_profile,
+                              indent=INDENT)
+
+        # Propagate pipeline feedback to the user
+        self.context.set_message_handler(self._message_handler)
+
         #
         # Load the Project
         #
         try:
             self.project = Project(directory, self.context, cli_options=self._main_options['option'])
+            self.loader = self.project.loader
         except LoadError as e:
 
             # Let's automatically start a `bst init` session in this case
@@ -217,24 +234,13 @@ class App():
         except BstError as e:
             self._error_exit(e, "Error loading project")
 
-        # Create the logger right before setting the message handler
-        self.logger = LogLine(self.context,
-                              self._content_profile,
-                              self._format_profile,
-                              self._success_profile,
-                              self._error_profile,
-                              self._detail_profile,
-                              indent=INDENT)
-
-        # Propagate pipeline feedback to the user
-        self.context.set_message_handler(self._message_handler)
-
         # Now that we have a logger and message handler,
         # we can override the global exception hook.
         sys.excepthook = self._global_exception_handler
 
         # Create the stream right away, we'll need to pass it around
         self.stream = Stream(self.context, self.project, self._session_start,
+                             self.loader,
                              session_start_callback=self.session_start_cb,
                              interrupt_callback=self._interrupt_handler,
                              ticker_callback=self._tick,
diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index 11afef6..a190f17 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -25,7 +25,6 @@ import shutil
 
 from .._exceptions import LoadError, LoadErrorReason
 from .. import Consistency
-from .._project import Project
 from .. import _yaml
 from ..element import Element
 from .._profile import Topics, profile_start, profile_end
@@ -49,11 +48,10 @@ from . import MetaSource
 #    parent (Loader): A parent Loader object, in the case this is a junctioned Loader
 #    tempdir (str): A directory to cleanup with the Loader, given to the loader by a parent
 #                   loader in the case that this loader is a subproject loader.
-#    fetch_subprojects (bool): Whether to fetch subprojects while loading
 #
 class Loader():
 
-    def __init__(self, context, project, *, parent=None, tempdir=None, fetch_subprojects=False):
+    def __init__(self, context, project, *, parent=None, tempdir=None):
 
         # Ensure we have an absolute path for the base directory
         basedir = project.element_path
@@ -68,7 +66,6 @@ class Loader():
         #
         # Private members
         #
-        self._fetch_subprojects = fetch_subprojects
         self._context = context
         self._options = project.options      # Project options (OptionPool)
         self._basedir = basedir              # Base project directory
@@ -88,11 +85,12 @@ class Loader():
     #                       this is a bit more expensive due to deep copies
     #    ticker (callable): An optional function for tracking load progress
     #    targets (list of str): Target, element-path relative bst filenames in the project
+    #    fetch_subprojects (bool): Whether to fetch subprojects while loading
     #
     # Raises: LoadError
     #
     # Returns: The toplevel LoadElement
-    def load(self, targets, rewritable=False, ticker=None):
+    def load(self, targets, rewritable=False, ticker=None, fetch_subprojects=False):
 
         for filename in targets:
             if os.path.isabs(filename):
@@ -109,8 +107,9 @@ class Loader():
 
         for target in targets:
             profile_start(Topics.LOAD_PROJECT, target)
-            junction, name, loader = self._parse_name(target, rewritable, ticker)
-            loader._load_file(name, rewritable, ticker)
+            junction, name, loader = self._parse_name(target, rewritable, ticker,
+                                                      fetch_subprojects=fetch_subprojects)
+            loader._load_file(name, rewritable, ticker, fetch_subprojects)
             deps.append(Dependency(name, junction=junction))
             profile_end(Topics.LOAD_PROJECT, target)
 
@@ -136,7 +135,8 @@ class Loader():
         #
         for target in targets:
             profile_start(Topics.SORT_DEPENDENCIES, target)
-            junction, name, loader = self._parse_name(target, rewritable, ticker)
+            junction, name, loader = self._parse_name(target, rewritable, ticker,
+                                                      fetch_subprojects=fetch_subprojects)
             loader._sort_dependencies(name)
             profile_end(Topics.SORT_DEPENDENCIES, target)
             # Finally, wrap what we have into LoadElements and return the target
@@ -197,11 +197,12 @@ class Loader():
     #    filename (str): The element-path relative bst file
     #    rewritable (bool): Whether we should load in round trippable mode
     #    ticker (callable): A callback to report loaded filenames to the frontend
+    #    fetch_subprojects (bool): Whether to fetch subprojects while loading
     #
     # Returns:
     #    (LoadElement): A loaded LoadElement
     #
-    def _load_file(self, filename, rewritable, ticker):
+    def _load_file(self, filename, rewritable, ticker, fetch_subprojects):
 
         # Silently ignore already loaded files
         if filename in self._elements:
@@ -249,12 +250,13 @@ class Loader():
         # Load all dependency files for the new LoadElement
         for dep in element.deps:
             if dep.junction:
-                self._load_file(dep.junction, rewritable, ticker)
-                loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker)
+                self._load_file(dep.junction, rewritable, ticker, fetch_subprojects)
+                loader = self._get_loader(dep.junction, rewritable=rewritable, ticker=ticker,
+                                          fetch_subprojects=fetch_subprojects)
             else:
                 loader = self
 
-            dep_element = loader._load_file(dep.name, rewritable, ticker)
+            dep_element = loader._load_file(dep.name, rewritable, ticker, fetch_subprojects)
 
             if _yaml.node_get(dep_element.node, str, Symbol.KIND) == 'junction':
                 raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -453,11 +455,12 @@ class Loader():
     #
     # Args:
     #    filename (str): Junction name
+    #    fetch_subprojects (bool): Whether to fetch subprojects while loading
     #
     # Raises: LoadError
     #
     # Returns: A Loader or None if specified junction does not exist
-    def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0):
+    def _get_loader(self, filename, *, rewritable=False, ticker=None, level=0, fetch_subprojects=False):
         # return previously determined result
         if filename in self._loaders:
             loader = self._loaders[filename]
@@ -474,13 +477,14 @@ class Loader():
         if self._parent:
             # junctions in the parent take precedence over junctions defined
             # in subprojects
-            loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker, level=level + 1)
+            loader = self._parent._get_loader(filename, rewritable=rewritable, ticker=ticker,
+                                              level=level + 1, fetch_subprojects=fetch_subprojects)
             if loader:
                 self._loaders[filename] = loader
                 return loader
 
         try:
-            self._load_file(filename, rewritable, ticker)
+            self._load_file(filename, rewritable, ticker, fetch_subprojects)
         except LoadError as e:
             if e.reason != LoadErrorReason.MISSING_FILE:
                 # other load error
@@ -509,7 +513,7 @@ class Loader():
             # Handle the case where a subproject needs to be fetched
             #
             if source.get_consistency() == Consistency.RESOLVED:
-                if self._fetch_subprojects:
+                if fetch_subprojects:
                     if ticker:
                         ticker(filename, 'Fetching subproject from {} source'.format(source.get_kind()))
                     source.fetch()
@@ -535,7 +539,9 @@ class Loader():
         # Load the project
         project_dir = os.path.join(basedir, element.path)
         try:
-            project = Project(project_dir, self._context, junction=element)
+            from .._project import Project
+            project = Project(project_dir, self._context, junction=element,
+                              parent_loader=self, tempdir=basedir)
         except LoadError as e:
             if e.reason == LoadErrorReason.MISSING_PROJECT_CONF:
                 raise LoadError(reason=LoadErrorReason.INVALID_JUNCTION,
@@ -545,11 +551,7 @@ class Loader():
             else:
                 raise
 
-        loader = Loader(self._context, project,
-                        parent=self,
-                        tempdir=basedir,
-                        fetch_subprojects=self._fetch_subprojects)
-
+        loader = project.loader
         self._loaders[filename] = loader
 
         return loader
@@ -580,13 +582,14 @@ class Loader():
     #   rewritable (bool): Whether the loaded files should be rewritable
     #                      this is a bit more expensive due to deep copies
     #   ticker (callable): An optional function for tracking load progress
+    #   fetch_subprojects (bool): Whether to fetch subprojects while loading
     #
     # Returns:
     #   (tuple): - (str): name of the junction element
     #            - (str): name of the element
     #            - (Loader): loader for sub-project
     #
-    def _parse_name(self, name, rewritable, ticker):
+    def _parse_name(self, name, rewritable, ticker, fetch_subprojects=False):
         # We allow to split only once since deep junctions names are forbidden.
         # Users who want to refer to elements in sub-sub-projects are required
         # to create junctions on the top level project.
@@ -594,6 +597,7 @@ class Loader():
         if len(junction_path) == 1:
             return None, junction_path[-1], self
         else:
-            self._load_file(junction_path[-2], rewritable, ticker)
-            loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker)
+            self._load_file(junction_path[-2], rewritable, ticker, fetch_subprojects)
+            loader = self._get_loader(junction_path[-2], rewritable=rewritable, ticker=ticker,
+                                      fetch_subprojects=fetch_subprojects)
             return junction_path[-2], junction_path[-1], loader
diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 1474b37..909ae24 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -25,7 +25,6 @@ from operator import itemgetter
 
 from ._exceptions import PipelineError
 from ._message import Message, MessageType
-from ._loader import Loader
 from ._profile import Topics, profile_start, profile_end
 from .element import Element
 from . import Scope, Consistency
@@ -77,7 +76,6 @@ class Pipeline():
         # Private members
         #
         self._artifacts = artifacts
-        self._loader = None
 
     # load()
     #
@@ -106,11 +104,9 @@ class Pipeline():
 
         profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in targets))
 
-        self._loader = Loader(self._context, self._project,
-                              fetch_subprojects=fetch_subprojects)
-
         with self._context.timed_activity("Loading pipeline", silent_nested=True):
-            meta_elements = self._loader.load(targets, rewritable, None)
+            meta_elements = self._project.loader.load(targets, rewritable, None,
+                                                      fetch_subprojects=fetch_subprojects)
 
         # Resolve the real elements now that we've loaded the project
         with self._context.timed_activity("Resolving pipeline"):
@@ -388,8 +384,7 @@ class Pipeline():
     # Cleans up resources used by the Pipeline.
     #
     def cleanup(self):
-        if self._loader:
-            self._loader.cleanup()
+        self._project.loader.cleanup()
 
         # Reset the element loader state
         Element._reset_load_state()
diff --git a/buildstream/_platform/linux.py b/buildstream/_platform/linux.py
index fec512b..e6541e8 100644
--- a/buildstream/_platform/linux.py
+++ b/buildstream/_platform/linux.py
@@ -30,9 +30,9 @@ from . import Platform
 
 class Linux(Platform):
 
-    def __init__(self, context, project):
+    def __init__(self, context):
 
-        super().__init__(context, project)
+        super().__init__(context)
 
         self._die_with_parent_available = _site.check_bwrap_version(0, 1, 8)
         self._user_ns_available = self._check_user_ns_available(context)
diff --git a/buildstream/_platform/platform.py b/buildstream/_platform/platform.py
index 29da335..8a074eb 100644
--- a/buildstream/_platform/platform.py
+++ b/buildstream/_platform/platform.py
@@ -35,9 +35,8 @@ class Platform():
     # Args:
     #     context (context): The project context
     #
-    def __init__(self, context, project):
+    def __init__(self, context):
         self.context = context
-        self.project = project
 
     @classmethod
     def create_instance(cls, *args, **kwargs):
diff --git a/buildstream/_platform/unix.py b/buildstream/_platform/unix.py
index 8b1d2ec..edbd355 100644
--- a/buildstream/_platform/unix.py
+++ b/buildstream/_platform/unix.py
@@ -28,9 +28,9 @@ from . import Platform
 
 class Unix(Platform):
 
-    def __init__(self, context, project):
+    def __init__(self, context):
 
-        super().__init__(context, project)
+        super().__init__(context)
         self._artifact_cache = TarCache(context)
 
         # Not necessarily 100% reliable, but we want to fail early.
diff --git a/buildstream/_project.py b/buildstream/_project.py
index b568cf8..36ae5b2 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -33,6 +33,7 @@ from ._elementfactory import ElementFactory
 from ._sourcefactory import SourceFactory
 from ._projectrefs import ProjectRefs, ProjectRefStorage
 from ._versions import BST_FORMAT_VERSION
+from ._loader import Loader
 
 
 # The separator we use for user specified aliases
@@ -70,7 +71,8 @@ class HostMount():
 #
 class Project():
 
-    def __init__(self, directory, context, *, junction=None, cli_options=None):
+    def __init__(self, directory, context, *, junction=None, cli_options=None,
+                 parent_loader=None, tempdir=None):
 
         # The project name
         self.name = None
@@ -118,6 +120,10 @@ class Project():
 
         self._context.add_project(self)
 
+        self.loader = Loader(self._context, self,
+                             parent=parent_loader,
+                             tempdir=tempdir)
+
     # translate_url():
     #
     # Translates the given url which may be specified with an alias
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 5013daf..48d3571 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -44,6 +44,7 @@ from . import Scope, Consistency
 #    context (Context): The Context object
 #    project (Project): The Project object
 #    session_start (datetime): The time when the session started
+#    loader (Loader): The Loader object
 #    session_start_callback (callable): A callback to invoke when the session starts
 #    interrupt_callback (callable): A callback to invoke when we get interrupted
 #    ticker_callback (callable): Invoked every second while running the scheduler
@@ -52,7 +53,7 @@ from . import Scope, Consistency
 #
 class Stream():
 
-    def __init__(self, context, project, session_start, *,
+    def __init__(self, context, project, session_start, loader, *,
                  session_start_callback=None,
                  interrupt_callback=None,
                  ticker_callback=None,
@@ -70,7 +71,6 @@ class Stream():
         #
         # Private members
         #
-        Platform.create_instance(context, project)
         self._platform = Platform.get_platform()
         self._artifacts = self._platform.artifactcache
         self._context = context
diff --git a/tests/loader/__init__.py b/tests/loader/__init__.py
index 49db9cf..fcefdac 100644
--- a/tests/loader/__init__.py
+++ b/tests/loader/__init__.py
@@ -11,4 +11,4 @@ from buildstream._loader import Loader
 def make_loader(basedir):
     context = Context()
     project = Project(basedir, context)
-    return Loader(context, project)
+    return project.loader


[buildstream] 20/30: buildstream/element.py: Convert an exception to an assert.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 7eccd568b857954231697d1f82713c3f7a5f95dd
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Thu Jun 28 15:35:30 2018 +0200

    buildstream/element.py: Convert an exception to an assert.
    
    This situation should not happen.
---
 buildstream/element.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/buildstream/element.py b/buildstream/element.py
index 30665e3..7365894 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -2112,12 +2112,9 @@ class Element(Plugin):
 
         if self.__is_junction:
             splits = _yaml.node_chain_copy(element_splits)
-        elif project._splits is None:
-            raise LoadError(LoadErrorReason.INVALID_DATA,
-                            "{}: Project was not fully loaded while loading element. "
-                            "Only non-artifact elements (e.g. junctions) are allowed in this context."
-                            .format(self.name))
         else:
+            assert project._splits is not None
+
             splits = _yaml.node_chain_copy(project._splits)
             # Extend project wide split rules with any split rules defined by the element
             _yaml.composite(splits, element_splits)


[buildstream] 16/30: Move loading and cleaning of elements from Pipeline to Project.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 63aecde73494e5f830c7b8a3511a76b11a203b91
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 19:12:06 2018 +0200

    Move loading and cleaning of elements from Pipeline to Project.
---
 buildstream/_pipeline.py | 36 +++----------------------------
 buildstream/_project.py  | 55 ++++++++++++++++++++++++++++++++++++++++++++++++
 buildstream/_stream.py   |  4 ++--
 3 files changed, 60 insertions(+), 35 deletions(-)

diff --git a/buildstream/_pipeline.py b/buildstream/_pipeline.py
index 909ae24..22760a4 100644
--- a/buildstream/_pipeline.py
+++ b/buildstream/_pipeline.py
@@ -26,7 +26,6 @@ from operator import itemgetter
 from ._exceptions import PipelineError
 from ._message import Message, MessageType
 from ._profile import Topics, profile_start, profile_end
-from .element import Element
 from . import Scope, Consistency
 from ._project import ProjectRefStorage
 
@@ -104,28 +103,9 @@ class Pipeline():
 
         profile_start(Topics.LOAD_PIPELINE, "_".join(t.replace(os.sep, '-') for t in targets))
 
-        with self._context.timed_activity("Loading pipeline", silent_nested=True):
-            meta_elements = self._project.loader.load(targets, rewritable, None,
-                                                      fetch_subprojects=fetch_subprojects)
-
-        # Resolve the real elements now that we've loaded the project
-        with self._context.timed_activity("Resolving pipeline"):
-            elements = [
-                Element._new_from_meta(meta, self._artifacts)
-                for meta in meta_elements
-            ]
-
-        # Now warn about any redundant source references which may have
-        # been discovered in the resolve() phase.
-        redundant_refs = Element._get_redundant_source_refs()
-        if redundant_refs:
-            detail = "The following inline specified source references will be ignored:\n\n"
-            lines = [
-                "{}:{}".format(source._get_provenance(), ref)
-                for source, ref in redundant_refs
-            ]
-            detail += "\n".join(lines)
-            self._message(MessageType.WARN, "Ignoring redundant source references", detail=detail)
+        elements = self._project.load_elements(targets, self._artifacts,
+                                               rewritable=rewritable,
+                                               fetch_subprojects=fetch_subprojects)
 
         # Now create element groups to match the input target groups
         elt_iter = iter(elements)
@@ -379,16 +359,6 @@ class Pipeline():
                 detail += "  " + element._get_full_name() + "\n"
             raise PipelineError("Inconsistent pipeline", detail=detail, reason="inconsistent-pipeline")
 
-    # cleanup()
-    #
-    # Cleans up resources used by the Pipeline.
-    #
-    def cleanup(self):
-        self._project.loader.cleanup()
-
-        # Reset the element loader state
-        Element._reset_load_state()
-
     #############################################################
     #                     Private Methods                       #
     #############################################################
diff --git a/buildstream/_project.py b/buildstream/_project.py
index f58a9c9..7aa72b6 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -35,6 +35,8 @@ from ._projectrefs import ProjectRefs, ProjectRefStorage
 from ._versions import BST_FORMAT_VERSION
 from ._loader import Loader
 from ._includes import Includes
+from .element import Element
+from ._message import Message, MessageType
 
 
 # The separator we use for user specified aliases
@@ -350,6 +352,59 @@ class Project():
 
         return self._cache_key
 
+    # load_elements()
+    #
+    # Loads elements from target names.
+    #
+    # Args:
+    #    targets (list): Target names
+    #    artifacts (ArtifactCache): Artifact cache
+    #    rewritable (bool): Whether the loaded files should be rewritable
+    #                       this is a bit more expensive due to deep copies
+    #    fetch_subprojects (bool): Whether we should fetch subprojects as a part of the
+    #                              loading process, if they are not yet locally cached
+    #
+    # Returns:
+    #    (list): A list of loaded Element
+    #
+    def load_elements(self, targets, artifacts, *,
+                      rewritable=False, fetch_subprojects=False):
+        with self._context.timed_activity("Loading elements", silent_nested=True):
+            meta_elements = self.loader.load(targets, rewritable=rewritable,
+                                             ticker=None,
+                                             fetch_subprojects=fetch_subprojects)
+
+        with self._context.timed_activity("Resolving elements"):
+            elements = [
+                Element._new_from_meta(meta, artifacts)
+                for meta in meta_elements
+            ]
+
+        # Now warn about any redundant source references which may have
+        # been discovered in the resolve() phase.
+        redundant_refs = Element._get_redundant_source_refs()
+        if redundant_refs:
+            detail = "The following inline specified source references will be ignored:\n\n"
+            lines = [
+                "{}:{}".format(source._get_provenance(), ref)
+                for source, ref in redundant_refs
+            ]
+            detail += "\n".join(lines)
+            self._context.message(
+                Message(None, MessageType.WARN, "Ignoring redundant source references", detail=detail))
+
+        return elements
+
+    # cleanup()
+    #
+    # Cleans up resources used loading elements
+    #
+    def cleanup(self):
+        self.loader.cleanup()
+
+        # Reset the element loader state
+        Element._reset_load_state()
+
     # _load():
     #
     # Loads the project configuration file in the project directory.
diff --git a/buildstream/_stream.py b/buildstream/_stream.py
index 4801ecc..28afae3 100644
--- a/buildstream/_stream.py
+++ b/buildstream/_stream.py
@@ -90,8 +90,8 @@ class Stream():
     # Cleans up application state
     #
     def cleanup(self):
-        if self._pipeline:
-            self._pipeline.cleanup()
+        if self._project:
+            self._project.cleanup()
 
     # load_selection()
     #


[buildstream] 17/30: Adding missing test data.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 195b5804d6287ff0682074843d4310be7f6125bf
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Thu Jun 28 13:05:22 2018 +0200

    Adding missing test data.
---
 tests/format/include/conditional/element.bst    |  1 +
 tests/format/include/conditional/extra_conf.yml |  6 ++++++
 tests/format/include/conditional/project.conf   | 13 +++++++++++++
 3 files changed, 20 insertions(+)

diff --git a/tests/format/include/conditional/element.bst b/tests/format/include/conditional/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/conditional/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/conditional/extra_conf.yml b/tests/format/include/conditional/extra_conf.yml
new file mode 100644
index 0000000..dd58c98
--- /dev/null
+++ b/tests/format/include/conditional/extra_conf.yml
@@ -0,0 +1,6 @@
+variables:
+  (?):
+    - build_arch == "i586":
+        size: "4"
+    - build_arch == "x86_64":
+        size: "8"
diff --git a/tests/format/include/conditional/project.conf b/tests/format/include/conditional/project.conf
new file mode 100644
index 0000000..cb54779
--- /dev/null
+++ b/tests/format/include/conditional/project.conf
@@ -0,0 +1,13 @@
+name: test
+
+options:
+  build_arch:
+    type: arch
+    description: Architecture
+    variable: build_arch
+    values:
+      - i586
+      - x86_64
+
+(@):
+  - extra_conf.yml


[buildstream] 23/30: Make include path relative to the project where the including fragment is found.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit de2e7fea7d15e06cff49cf07c11f97fcf1718613
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Fri Jun 29 17:16:01 2018 +0200

    Make include path relative to the project where the including fragment is found.
---
 buildstream/_includes.py                           | 43 +++++++++++++---------
 tests/format/include.py                            | 19 ++++++++++
 tests/format/include/local_to_junction/element.bst |  1 +
 .../format/include/local_to_junction/project.conf  |  4 ++
 .../local_to_junction/subproject/extra_conf.yml    |  2 +
 .../local_to_junction/subproject/internal.yml      |  2 +
 .../local_to_junction/subproject/project.conf      |  1 +
 7 files changed, 55 insertions(+), 17 deletions(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index fd99a6c..0f68fc9 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -20,14 +20,20 @@ class Includes:
             for value in node:
                 self.ignore_includes(value)
 
-    def process(self, node, *, included=set()):
+    def process(self, node, *,
+                included=set(),
+                current_loader=None):
+        if current_loader is None:
+            current_loader = self._loader
+
         includes = _yaml.node_get(node, list, '(@)', default_value=None)
         if '(@)' in node:
             del node['(@)']
 
         if includes:
             for include in includes:
-                include_node, file_path = self._include_file(include)
+                include_node, file_path, sub_loader = self._include_file(include,
+                                                                         current_loader)
                 if file_path in included:
                     provenance = _yaml.node_get_provenance(node)
                     raise LoadError(LoadErrorReason.RECURSIVE_INCLUDE,
@@ -35,36 +41,39 @@ class Includes:
                                                                                    file_path))
                 try:
                     included.add(file_path)
-                    self.process(include_node, included=included)
+                    self.process(include_node, included=included,
+                                 current_loader=sub_loader)
                 finally:
                     included.remove(file_path)
                 _yaml.composite(node, include_node)
 
         for _, value in _yaml.node_items(node):
-            self._process_value(value)
+            self._process_value(value, current_loader=current_loader)
 
-    def _include_file(self, include):
+    def _include_file(self, include, loader):
         shortname = include
         if ':' in include:
             junction, include = include.split(':', 1)
-            junction_loader = self._loader._get_loader(junction, fetch_subprojects=True)
-            project = junction_loader.project
+            junction_loader = loader._get_loader(junction, fetch_subprojects=True)
+            current_loader = junction_loader
         else:
-            project = self._loader.project
+            current_loader = loader
+        project = current_loader.project
         directory = project.directory
         file_path = os.path.join(directory, include)
+        key = (current_loader, file_path)
         if file_path not in self._loaded:
-            self._loaded[file_path] = _yaml.load(os.path.join(directory, include),
-                                                 shortname=shortname,
-                                                 project=project)
-        return self._loaded[file_path], file_path
+            self._loaded[key] = _yaml.load(os.path.join(directory, include),
+                                           shortname=shortname,
+                                        project=project)
+        return self._loaded[key], file_path, current_loader
 
-    def _process_value(self, value):
+    def _process_value(self, value, *, current_loader=None):
         if isinstance(value, Mapping):
-            self.process(value)
+            self.process(value, current_loader=current_loader)
         elif isinstance(value, list):
-            self._process_list(value)
+            self._process_list(value, current_loader=current_loader)
 
-    def _process_list(self, values):
+    def _process_list(self, values, *, current_loader=None):
         for value in values:
-            self._process_value(value)
+            self._process_value(value, current_loader=current_loader)
diff --git a/tests/format/include.py b/tests/format/include.py
index 4b26c97..ba8d4a0 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -229,3 +229,22 @@ def test_inner(cli, datafiles):
     result.assert_success()
     loaded = _yaml.load_data(result.output)
     assert loaded['build_arch'] == 'x86_64'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_local_to_junction(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'local_to_junction')
+
+    generate_junction(tmpdir,
+                      os.path.join(project, 'subproject'),
+                      os.path.join(project, 'junction.bst'),
+                      store_ref=True)
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert loaded['included'] == 'True'
diff --git a/tests/format/include/local_to_junction/element.bst b/tests/format/include/local_to_junction/element.bst
new file mode 100644
index 0000000..4d7f702
--- /dev/null
+++ b/tests/format/include/local_to_junction/element.bst
@@ -0,0 +1 @@
+kind: manual
diff --git a/tests/format/include/local_to_junction/project.conf b/tests/format/include/local_to_junction/project.conf
new file mode 100644
index 0000000..4836c5f
--- /dev/null
+++ b/tests/format/include/local_to_junction/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - junction.bst:extra_conf.yml
diff --git a/tests/format/include/local_to_junction/subproject/extra_conf.yml b/tests/format/include/local_to_junction/subproject/extra_conf.yml
new file mode 100644
index 0000000..1c0b8cc
--- /dev/null
+++ b/tests/format/include/local_to_junction/subproject/extra_conf.yml
@@ -0,0 +1,2 @@
+(@):
+  - internal.yml
diff --git a/tests/format/include/local_to_junction/subproject/internal.yml b/tests/format/include/local_to_junction/subproject/internal.yml
new file mode 100644
index 0000000..404ecd6
--- /dev/null
+++ b/tests/format/include/local_to_junction/subproject/internal.yml
@@ -0,0 +1,2 @@
+variables:
+  included: 'True'
diff --git a/tests/format/include/local_to_junction/subproject/project.conf b/tests/format/include/local_to_junction/subproject/project.conf
new file mode 100644
index 0000000..7a66554
--- /dev/null
+++ b/tests/format/include/local_to_junction/subproject/project.conf
@@ -0,0 +1 @@
+name: test-sub


[buildstream] 13/30: tests/frontend/track.py: Add tests for tracking on included fragments.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 7c4a1fed1adbb655fbafeaacfe1206eac26c5023
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 19:07:35 2018 +0200

    tests/frontend/track.py: Add tests for tracking on included fragments.
---
 tests/frontend/track.py | 132 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 132 insertions(+)

diff --git a/tests/frontend/track.py b/tests/frontend/track.py
index 4e10598..1cf962f 100644
--- a/tests/frontend/track.py
+++ b/tests/frontend/track.py
@@ -480,3 +480,135 @@ def test_cross_junction(cli, tmpdir, datafiles, ref_storage, kind):
         assert cli.get_element_state(project, 'junction.bst:import-etc-repo.bst') == 'buildable'
 
         assert os.path.exists(os.path.join(project, 'project.refs'))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    dev_files_path = os.path.join(project, 'files', 'dev-files')
+    element_path = os.path.join(project, 'elements')
+    element_name = 'track-test-{}.bst'.format(kind)
+
+    configure_project(project, {
+        'ref-storage': ref_storage
+    })
+
+    # Create our repo object of the given source type with
+    # the dev files, and then collect the initial ref.
+    #
+    repo = create_repo(kind, str(tmpdir))
+    ref = repo.create(dev_files_path)
+
+    # Generate the element
+    element = {
+        'kind': 'import',
+        '(@)': ['elements/sources.yml']
+    }
+    sources = {
+        'sources': [
+            repo.source_config()
+        ]
+    }
+
+    _yaml.dump(element, os.path.join(element_path, element_name))
+    _yaml.dump(sources, os.path.join(element_path, 'sources.yml'))
+
+    # Assert that a fetch is needed
+    assert cli.get_element_state(project, element_name) == 'no reference'
+
+    # Now first try to track it
+    result = cli.run(project=project, args=['track', element_name])
+    result.assert_success()
+
+    # And now fetch it: The Source has probably already cached the
+    # latest ref locally, but it is not required to have cached
+    # the associated content of the latest ref at track time, that
+    # is the job of fetch.
+    result = cli.run(project=project, args=['fetch', element_name])
+    result.assert_success()
+
+    # Assert that we are now buildable because the source is
+    # now cached.
+    assert cli.get_element_state(project, element_name) == 'buildable'
+
+    # Assert there was a project.refs created, depending on the configuration
+    if ref_storage == 'project.refs':
+        assert os.path.exists(os.path.join(project, 'project.refs'))
+    else:
+        assert not os.path.exists(os.path.join(project, 'project.refs'))
+        new_sources = _yaml.load(os.path.join(element_path, 'sources.yml'))
+        assert 'sources' in new_sources
+        assert len(new_sources['sources']) == 1
+        assert 'ref' in new_sources['sources'][0]
+        assert ref == new_sources['sources'][0]['ref']
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("ref_storage", [('inline'), ('project.refs')])
+@pytest.mark.parametrize("kind", [(kind) for kind in ALL_REPO_KINDS])
+def test_track_include_junction(cli, tmpdir, datafiles, ref_storage, kind):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+    dev_files_path = os.path.join(project, 'files', 'dev-files')
+    element_path = os.path.join(project, 'elements')
+    element_name = 'track-test-{}.bst'.format(kind)
+    subproject_path = os.path.join(project, 'files', 'sub-project')
+    sub_element_path = os.path.join(subproject_path, 'elements')
+    junction_path = os.path.join(element_path, 'junction.bst')
+
+    configure_project(project, {
+        'ref-storage': ref_storage
+    })
+
+    # Create our repo object of the given source type with
+    # the dev files, and then collect the initial ref.
+    #
+    repo = create_repo(kind, str(tmpdir.join('element_repo')))
+    ref = repo.create(dev_files_path)
+
+    # Generate the element
+    element = {
+        'kind': 'import',
+        '(@)': ['junction.bst:elements/sources.yml']
+    }
+    sources = {
+        'sources': [
+            repo.source_config()
+        ]
+    }
+
+    _yaml.dump(element, os.path.join(element_path, element_name))
+    _yaml.dump(sources, os.path.join(sub_element_path, 'sources.yml'))
+
+    generate_junction(str(tmpdir.join('junction_repo')),
+                      subproject_path, junction_path, store_ref=True)
+
+    result = cli.run(project=project, args=['track', 'junction.bst'])
+    result.assert_success()
+
+    # Assert that a fetch is needed
+    assert cli.get_element_state(project, element_name) == 'no reference'
+
+    # Now first try to track it
+    result = cli.run(project=project, args=['track', element_name])
+
+    # Assert there was a project.refs created, depending on the configuration
+    if ref_storage == 'inline':
+        # FIXME: We should expect an error. But only a warning is emitted
+        # result.assert_main_error(ErrorDomain.SOURCE, 'tracking-junction-fragment')
+
+        assert 'junction.bst:elements/sources.yml: Cannot track source in a fragment from a junction' in result.stderr
+    else:
+        assert os.path.exists(os.path.join(project, 'project.refs'))
+
+        # And now fetch it: The Source has probably already cached the
+        # latest ref locally, but it is not required to have cached
+        # the associated content of the latest ref at track time, that
+        # is the job of fetch.
+        result = cli.run(project=project, args=['fetch', element_name])
+        result.assert_success()
+
+        # Assert that we are now buildable because the source is
+        # now cached.
+        assert cli.get_element_state(project, element_name) == 'buildable'


[buildstream] 14/30: tests/format/include.py: Test use of conditionals in included fragment.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit da42c7d4a4922b3ea41e969aa3c25b1f331b25be
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 19:08:43 2018 +0200

    tests/format/include.py: Test use of conditionals in included fragment.
---
 tests/format/include.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/tests/format/include.py b/tests/format/include.py
index b09830a..7a1a15a 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -187,3 +187,19 @@ def test_junction_do_not_use_included_overrides(cli, tmpdir, datafiles):
     loaded = _yaml.load_data(result.output)
     assert 'main_override' in loaded
     assert 'included_override' not in loaded
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_conditional_in_fragment(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'conditional')
+
+    result = cli.run(project=project, args=[
+        '-o', 'build_arch', 'x86_64',
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_success()
+    loaded = _yaml.load_data(result.output)
+    assert 'size' in loaded
+    assert loaded['size'] == '8'


[buildstream] 06/30: Add documentation for include directive.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit d50ebc6b23a6df8b8759fe5510d15dd3379d01ee
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 13 13:44:06 2018 +0200

    Add documentation for include directive.
    
    Documents #331.
---
 doc/source/format_intro.rst | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/doc/source/format_intro.rst b/doc/source/format_intro.rst
index b1780f9..d0a934a 100644
--- a/doc/source/format_intro.rst
+++ b/doc/source/format_intro.rst
@@ -289,3 +289,36 @@ free form and not validated.
        # This element's `make install` is broken, replace it.
        (=):
        - cp src/program %{bindir}
+
+(@) Include
+~~~~~~~~~~~
+Indicates that content should be loaded from files.
+
+The include directive expects a list of strings. Those are file names
+relative to project directory. Or they can be prefixed with a junction
+name and a colon (':'). In that case, the remainder of the string is a
+file name relative to the project of the junction.
+
+The include directive can be used in ``project.conf`` or in a ``.bst``
+file.  It can also be used in a file included by another include
+directive.
+
+Included files are composed into the including file. The files should
+take care of composition using list directives.
+
+Some ``project.conf`` configuration is not overridable by includes:
+``name``, ``format-version`` and ``element-path``.
+
+Junction elements never use values from included files from
+``project.conf``.  Variables, element overrides and source overrides
+required by junctions should all be directly in the ``project.conf``.
+
+Junction elements cannot use the include directive.
+
+**Example:**
+
+.. code:: yaml
+
+   elements:
+     (@):
+       - junction.bst:includes/element-overrides.bst


[buildstream] 29/30: Give less priority to projectdata.yml than includes

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 6028168de6fc4bbf27b7e04c96a336d641536dc5
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Sat Jul 7 09:11:12 2018 +0200

    Give less priority to projectdata.yml than includes
---
 buildstream/_project.py | 53 ++++++++++++++++++++++++++-----------------------
 1 file changed, 28 insertions(+), 25 deletions(-)

diff --git a/buildstream/_project.py b/buildstream/_project.py
index 5e89f88..ccc62d8 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -273,7 +273,6 @@ class Project():
         self._partially_loaded = False
         self._fully_loaded = False
         self._project_includes = None
-        self._config_node = None
 
         profile_start(Topics.LOAD_PROJECT, self.directory.replace(os.sep, '-'))
         self._load(parent_loader=parent_loader, tempdir=tempdir)
@@ -415,19 +414,20 @@ class Project():
 
         # Load builtin default
         projectfile = os.path.join(self.directory, _PROJECT_CONF_FILE)
-        self._config_node = _yaml.load(_site.default_project_config)
+        self._default_config_node = _yaml.load(_site.default_project_config)
 
         # Load project local config and override the builtin
         try:
-            project_conf = _yaml.load(projectfile)
+            self._project_conf = _yaml.load(projectfile)
         except LoadError as e:
             # Raise a more specific error here
             raise LoadError(LoadErrorReason.MISSING_PROJECT_CONF, str(e))
 
-        _yaml.composite(self._config_node, project_conf)
+        pre_config_node = _yaml.node_copy(self._default_config_node)
+        _yaml.composite(pre_config_node, self._project_conf)
 
         # Assert project's format version early, before validating toplevel keys
-        format_version = _yaml.node_get(self._config_node, int, 'format-version')
+        format_version = _yaml.node_get(pre_config_node, int, 'format-version')
         if BST_FORMAT_VERSION < format_version:
             major, minor = utils.get_bst_version()
             raise LoadError(
@@ -437,15 +437,15 @@ class Project():
 
         # The project name, element path and option declarations
         # are constant and cannot be overridden by option conditional statements
-        self.name = _yaml.node_get(self._config_node, str, 'name')
+        self.name = _yaml.node_get(pre_config_node, str, 'name')
 
         # Validate that project name is a valid symbol name
-        _yaml.assert_symbol_name(_yaml.node_get_provenance(self._config_node, 'name'),
+        _yaml.assert_symbol_name(_yaml.node_get_provenance(pre_config_node, 'name'),
                                  self.name, "project name")
 
         self.element_path = os.path.join(
             self.directory,
-            _yaml.node_get(self._config_node, str, 'element-path')
+            _yaml.node_get(pre_config_node, str, 'element-path')
         )
 
         self.config.options = OptionPool(self.element_path)
@@ -457,15 +457,17 @@ class Project():
 
         self._project_includes = Includes(self.loader)
 
-        config_no_include = _yaml.node_copy(self._config_node)
-        self._project_includes.process(config_no_include, only_local=True)
+        project_conf_first_pass = _yaml.node_copy(self._project_conf)
+        self._project_includes.process(project_conf_first_pass, only_local=True)
+        config_no_include = _yaml.node_copy(self._default_config_node)
+        _yaml.composite(config_no_include, project_conf_first_pass)
 
         self._load_pass(config_no_include, self.first_pass_config, True)
 
         # Use separate file for storing source references
-        self.ref_storage = _yaml.node_get(self._config_node, str, 'ref-storage')
+        self.ref_storage = _yaml.node_get(pre_config_node, str, 'ref-storage')
         if self.ref_storage not in [ProjectRefStorage.INLINE, ProjectRefStorage.PROJECT_REFS]:
-            p = _yaml.node_get_provenance(self._config_node, 'ref-storage')
+            p = _yaml.node_get_provenance(pre_config_node, 'ref-storage')
             raise LoadError(LoadErrorReason.INVALID_DATA,
                             "{}: Invalid value '{}' specified for ref-storage"
                             .format(p, self.ref_storage))
@@ -482,11 +484,14 @@ class Project():
         if self.junction:
             self.junction._get_project().ensure_fully_loaded()
 
-        self._project_includes.process(self._config_node)
+        project_conf_second_pass = _yaml.node_copy(self._project_conf)
+        self._project_includes.process(project_conf_second_pass)
+        config = _yaml.node_copy(self._default_config_node)
+        _yaml.composite(config, project_conf_second_pass)
 
-        self._load_pass(self._config_node, self.config, False)
+        self._load_pass(config, self.config, False)
 
-        _yaml.node_validate(self._config_node, self.INCLUDE_CONFIG_KEYS + self.MAIN_FILE_CONFIG_KEYS)
+        _yaml.node_validate(config, self.INCLUDE_CONFIG_KEYS + self.MAIN_FILE_CONFIG_KEYS)
 
         #
         # Now all YAML composition is done, from here on we just load
@@ -494,30 +499,30 @@ class Project():
         #
 
         # Load artifacts pull/push configuration for this project
-        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(self._config_node)
+        self.artifact_cache_specs = ArtifactCache.specs_from_config_node(config)
 
         # Source url aliases
-        self._aliases = _yaml.node_get(self._config_node, Mapping, 'aliases', default_value={})
+        self._aliases = _yaml.node_get(config, Mapping, 'aliases', default_value={})
 
         # Load sandbox environment variables
-        self.base_environment = _yaml.node_get(self._config_node, Mapping, 'environment')
-        self.base_env_nocache = _yaml.node_get(self._config_node, list, 'environment-nocache')
+        self.base_environment = _yaml.node_get(config, Mapping, 'environment')
+        self.base_env_nocache = _yaml.node_get(config, list, 'environment-nocache')
 
         # Load sandbox configuration
-        self._sandbox = _yaml.node_get(self._config_node, Mapping, 'sandbox')
+        self._sandbox = _yaml.node_get(config, Mapping, 'sandbox')
 
         # Load project split rules
-        self._splits = _yaml.node_get(self._config_node, Mapping, 'split-rules')
+        self._splits = _yaml.node_get(config, Mapping, 'split-rules')
 
         # Fail on overlap
-        self.fail_on_overlap = _yaml.node_get(self._config_node, bool, 'fail-on-overlap')
+        self.fail_on_overlap = _yaml.node_get(config, bool, 'fail-on-overlap')
 
         # Load project.refs if it exists, this may be ignored.
         if self.ref_storage == ProjectRefStorage.PROJECT_REFS:
             self.refs.load(self.options)
 
         # Parse shell options
-        shell_options = _yaml.node_get(self._config_node, Mapping, 'shell')
+        shell_options = _yaml.node_get(config, Mapping, 'shell')
         _yaml.node_validate(shell_options, ['command', 'environment', 'host-files'])
         self._shell_command = _yaml.node_get(shell_options, list, 'command')
 
@@ -546,8 +551,6 @@ class Project():
 
             self._shell_host_files.append(mount)
 
-        self._config_node = None
-
     # _load_pass():
     #
     # Loads parts of the project configuration that are different


[buildstream] 08/30: buildstream/_options/optionpool.py: Fix parameters of OptionPool.load_cli_values

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit f3ed208a651a5925bafc010d313b15a046f9c497
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 16:57:13 2018 +0200

    buildstream/_options/optionpool.py: Fix parameters of OptionPool.load_cli_values
---
 buildstream/_options/optionpool.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/buildstream/_options/optionpool.py b/buildstream/_options/optionpool.py
index 83a202f..b53e87a 100644
--- a/buildstream/_options/optionpool.py
+++ b/buildstream/_options/optionpool.py
@@ -107,8 +107,9 @@ class OptionPool():
     #
     # Args:
     #    cli_options (list): A list of (str, str) tuples
+    #    ignore_unknown (bool): Whether to silently ignore unknown options.
     #
-    def load_cli_values(self, cli_options, ignore_unknown=False):
+    def load_cli_values(self, cli_options, *, ignore_unknown=False):
         for option_name, option_value in cli_options:
             try:
                 option = self._options[option_name]


[buildstream] 18/30: Detect recursive includes.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 4789c0fde87c6d989142c8d6ee473b58f13d38be
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Thu Jun 28 13:58:14 2018 +0200

    Detect recursive includes.
---
 buildstream/_exceptions.py                     |  3 +++
 buildstream/_includes.py                       | 28 +++++++++++++++++---------
 tests/format/include.py                        | 12 +++++++++++
 tests/format/include/recursive/extra_conf.yml  |  2 ++
 tests/format/include/recursive/extra_conf2.yml |  2 ++
 tests/format/include/recursive/project.conf    |  4 ++++
 6 files changed, 41 insertions(+), 10 deletions(-)

diff --git a/buildstream/_exceptions.py b/buildstream/_exceptions.py
index 3779632..020ed88 100644
--- a/buildstream/_exceptions.py
+++ b/buildstream/_exceptions.py
@@ -200,6 +200,9 @@ class LoadErrorReason(Enum):
     # Try to load a directory not a yaml file
     LOADING_DIRECTORY = 18
 
+    # A recursive include has been encountered.
+    RECURSIVE_INCLUDE = 19
+
 
 # LoadError
 #
diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index 5b2c8aa..2bd885a 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -1,6 +1,7 @@
 import os
 from collections import Mapping
 from . import _yaml
+from ._exceptions import LoadError, LoadErrorReason
 
 
 class Includes:
@@ -9,17 +10,24 @@ class Includes:
         self._loader = loader
         self._loaded = {}
 
-    def process(self, node):
-        while True:
-            includes = _yaml.node_get(node, list, '(@)', default_value=None)
-            if '(@)' in node:
-                del node['(@)']
-
-            if not includes:
-                break
+    def process(self, node, *, included=set()):
+        includes = _yaml.node_get(node, list, '(@)', default_value=None)
+        if '(@)' in node:
+            del node['(@)']
 
+        if includes:
             for include in includes:
-                include_node = self._include_file(include)
+                include_node, file_path = self._include_file(include)
+                if file_path in included:
+                    provenance = _yaml.node_get_provenance(node)
+                    raise LoadError(LoadErrorReason.RECURSIVE_INCLUDE,
+                                    "{}: trying to recursively include {}". format(provenance,
+                                                                                   file_path))
+                try:
+                    included.add(file_path)
+                    self.process(include_node, included=included)
+                finally:
+                    included.remove(file_path)
                 _yaml.composite(node, include_node)
 
         for _, value in _yaml.node_items(node):
@@ -39,7 +47,7 @@ class Includes:
             self._loaded[file_path] = _yaml.load(os.path.join(directory, include),
                                                  shortname=shortname,
                                                  project=project)
-        return self._loaded[file_path]
+        return self._loaded[file_path], file_path
 
     def _process_value(self, value):
         if isinstance(value, Mapping):
diff --git a/tests/format/include.py b/tests/format/include.py
index 7a1a15a..8a79ed9 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -203,3 +203,15 @@ def test_conditional_in_fragment(cli, tmpdir, datafiles):
     loaded = _yaml.load_data(result.output)
     assert 'size' in loaded
     assert loaded['size'] == '8'
+
+
+@pytest.mark.datafiles(DATA_DIR)
+def test_recusive_include(cli, tmpdir, datafiles):
+    project = os.path.join(str(datafiles), 'recursive')
+
+    result = cli.run(project=project, args=[
+        'show',
+        '--deps', 'none',
+        '--format', '%{vars}',
+        'element.bst'])
+    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.RECURSIVE_INCLUDE)
diff --git a/tests/format/include/recursive/extra_conf.yml b/tests/format/include/recursive/extra_conf.yml
new file mode 100644
index 0000000..57db0d3
--- /dev/null
+++ b/tests/format/include/recursive/extra_conf.yml
@@ -0,0 +1,2 @@
+(@):
+  - extra_conf2.yml
diff --git a/tests/format/include/recursive/extra_conf2.yml b/tests/format/include/recursive/extra_conf2.yml
new file mode 100644
index 0000000..e8dd5e2
--- /dev/null
+++ b/tests/format/include/recursive/extra_conf2.yml
@@ -0,0 +1,2 @@
+(@):
+  - extra_conf.yml
diff --git a/tests/format/include/recursive/project.conf b/tests/format/include/recursive/project.conf
new file mode 100644
index 0000000..a7791a4
--- /dev/null
+++ b/tests/format/include/recursive/project.conf
@@ -0,0 +1,4 @@
+name: test
+
+(@):
+  - extra_conf.yml


[buildstream] 11/30: buildstream/_includes.py: Remove validation of fragments.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit f06c2ce4c6c6a66e1e5ddcf3acc8d6cc218a0a38
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 17:10:39 2018 +0200

    buildstream/_includes.py: Remove validation of fragments.
---
 buildstream/_includes.py |  6 +-----
 buildstream/_project.py  |  2 +-
 tests/format/include.py  | 11 -----------
 3 files changed, 2 insertions(+), 17 deletions(-)

diff --git a/buildstream/_includes.py b/buildstream/_includes.py
index eed73a0..3837c5d 100644
--- a/buildstream/_includes.py
+++ b/buildstream/_includes.py
@@ -5,9 +5,8 @@ from . import _yaml
 
 class Includes:
 
-    def __init__(self, loader, valid_keys=None):
+    def __init__(self, loader):
         self._loader = loader
-        self._valid_keys = valid_keys
         self._loaded = {}
 
     def process(self, node):
@@ -21,9 +20,6 @@ class Includes:
 
             for include in includes:
                 include_node = self._include_file(include)
-                if self._valid_keys:
-                    _yaml.node_validate(include_node, self._valid_keys)
-
                 _yaml.composite(node, include_node)
 
         for _, value in _yaml.node_items(node):
diff --git a/buildstream/_project.py b/buildstream/_project.py
index 67eba0f..e50b26a 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -405,7 +405,7 @@ class Project():
 
         self._load_pass(_yaml.node_copy(config), self.first_pass_config, True)
 
-        project_includes = Includes(self.loader, self.INCLUDE_CONFIG_KEYS + ['elements', 'sources'])
+        project_includes = Includes(self.loader)
         project_includes.process(config)
 
         self._load_pass(config, self.config, False)
diff --git a/tests/format/include.py b/tests/format/include.py
index ca6eaab..b09830a 100644
--- a/tests/format/include.py
+++ b/tests/format/include.py
@@ -60,17 +60,6 @@ def test_include_junction_options(cli, tmpdir, datafiles):
 
 
 @pytest.mark.datafiles(DATA_DIR)
-def test_include_project_defines_name(cli, datafiles):
-    project = os.path.join(str(datafiles), 'defines_name')
-    result = cli.run(project=project, args=[
-        'show',
-        '--deps', 'none',
-        '--format', '%{vars}',
-        'element.bst'])
-    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
-
-
-@pytest.mark.datafiles(DATA_DIR)
 def test_junction_element_partial_project_project(cli, tmpdir, datafiles):
     """
     Junction elements never depend on fully include processed project.


[buildstream] 15/30: Drop BST_PROJECT_INCLUDES_PROCESSED and use kind to detect junctions.

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch valentindavid/flatpak-demo
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit f221dde148cb7db257e6a10f75ac68ca35ea5979
Author: Valentin David <va...@codethink.co.uk>
AuthorDate: Wed Jun 27 19:10:37 2018 +0200

    Drop BST_PROJECT_INCLUDES_PROCESSED and use kind to detect junctions.
---
 buildstream/_loader/loader.py            | 10 ++--------
 buildstream/_project.py                  |  3 ---
 buildstream/element.py                   | 26 ++++++++++----------------
 buildstream/plugins/elements/junction.py |  1 -
 4 files changed, 12 insertions(+), 28 deletions(-)

diff --git a/buildstream/_loader/loader.py b/buildstream/_loader/loader.py
index b6221a2..3637f39 100644
--- a/buildstream/_loader/loader.py
+++ b/buildstream/_loader/loader.py
@@ -23,7 +23,7 @@ from collections import Mapping, namedtuple
 import tempfile
 import shutil
 
-from .._exceptions import LoadError, LoadErrorReason, PluginError
+from .._exceptions import LoadError, LoadErrorReason
 from .. import Consistency
 from .. import _yaml
 from ..element import Element
@@ -246,13 +246,7 @@ class Loader():
             else:
                 raise
         kind = _yaml.node_get(node, str, Symbol.KIND)
-        try:
-            kind_type, _ = self.project.first_pass_config.plugins.get_element_type(kind)
-        except PluginError:
-            kind_type = None
-        if (kind_type and
-                hasattr(kind_type, 'BST_PROJECT_INCLUDES_PROCESSED') and
-                not kind_type.BST_PROJECT_INCLUDES_PROCESSED):
+        if kind == "junction":
             self._first_pass_options.process_node(node)
         else:
             if not self.project.is_loaded():
diff --git a/buildstream/_project.py b/buildstream/_project.py
index e50b26a..f58a9c9 100644
--- a/buildstream/_project.py
+++ b/buildstream/_project.py
@@ -169,9 +169,6 @@ class PluginCollection:
         self._assert_plugin_format(element, version)
         return element
 
-    def get_element_type(self, kind):
-        return self._element_factory.lookup(kind)
-
     # create_source()
     #
     # Instantiate and return a Source
diff --git a/buildstream/element.py b/buildstream/element.py
index ae604c5..30665e3 100644
--- a/buildstream/element.py
+++ b/buildstream/element.py
@@ -191,19 +191,13 @@ class Element(Plugin):
     *Since: 1.2*
     """
 
-    BST_PROJECT_INCLUDES_PROCESSED = True
-    """Whether to load the plugin before processing include directives in
-    project.conf.
-
-    *Since: 1.2*
-
-    """
-
     def __init__(self, context, project, artifacts, meta, plugin_conf):
 
         super().__init__(meta.name, context, project, meta.provenance, "element")
 
-        if not project.is_loaded() and self.BST_PROJECT_INCLUDES_PROCESSED:
+        self.__is_junction = meta.kind == "junction"
+
+        if not project.is_loaded() and not self.__is_junction:
             raise ElementError("{}: Cannot load element before project"
                                .format(self), reason="project-not-loaded")
 
@@ -912,7 +906,7 @@ class Element(Plugin):
 
         # Instantiate sources
         for meta_source in meta.sources:
-            meta_source.first_pass = not element.BST_PROJECT_INCLUDES_PROCESSED
+            meta_source.first_pass = meta.kind == "junction"
             source = plugins.create_source(meta_source)
             redundant_ref = source._load_ref()
             element.__sources.append(source)
@@ -2116,7 +2110,7 @@ class Element(Plugin):
         element_bst = _yaml.node_get(element_public, Mapping, 'bst', default_value={})
         element_splits = _yaml.node_get(element_bst, Mapping, 'split-rules', default_value={})
 
-        if not self.BST_PROJECT_INCLUDES_PROCESSED:
+        if self.__is_junction:
             splits = _yaml.node_chain_copy(element_splits)
         elif project._splits is None:
             raise LoadError(LoadErrorReason.INVALID_DATA,
@@ -2152,7 +2146,7 @@ class Element(Plugin):
             # Override the element's defaults with element specific
             # overrides from the project.conf
             project = self._get_project()
-            if not self.BST_PROJECT_INCLUDES_PROCESSED:
+            if self.__is_junction:
                 elements = project.first_pass_config.element_overrides
             else:
                 elements = project.element_overrides
@@ -2171,7 +2165,7 @@ class Element(Plugin):
     def __extract_environment(self, meta):
         default_env = _yaml.node_get(self.__defaults, Mapping, 'environment', default_value={})
 
-        if not self.BST_PROJECT_INCLUDES_PROCESSED:
+        if self.__is_junction:
             environment = {}
         else:
             project = self._get_project()
@@ -2189,7 +2183,7 @@ class Element(Plugin):
         return final_env
 
     def __extract_env_nocache(self, meta):
-        if not self.BST_PROJECT_INCLUDES_PROCESSED:
+        if self.__is_junction:
             project_nocache = []
         else:
             project = self._get_project()
@@ -2213,7 +2207,7 @@ class Element(Plugin):
         default_vars = _yaml.node_get(self.__defaults, Mapping, 'variables', default_value={})
 
         project = self._get_project()
-        if not self.BST_PROJECT_INCLUDES_PROCESSED:
+        if self.__is_junction:
             variables = _yaml.node_chain_copy(project.first_pass_config.base_variables)
         else:
             assert project.is_loaded()
@@ -2242,7 +2236,7 @@ class Element(Plugin):
     # Sandbox-specific configuration data, to be passed to the sandbox's constructor.
     #
     def __extract_sandbox_config(self, meta):
-        if not self.BST_PROJECT_INCLUDES_PROCESSED:
+        if self.__is_junction:
             sandbox_config = {'build-uid': 0,
                               'build-gid': 0}
         else:
diff --git a/buildstream/plugins/elements/junction.py b/buildstream/plugins/elements/junction.py
index dc6e385..ee5ed24 100644
--- a/buildstream/plugins/elements/junction.py
+++ b/buildstream/plugins/elements/junction.py
@@ -136,7 +136,6 @@ class JunctionElement(Element):
     # Junctions are not allowed any dependencies
     BST_FORBID_BDEPENDS = True
     BST_FORBID_RDEPENDS = True
-    BST_PROJECT_INCLUDES_PROCESSED = False
 
     def configure(self, node):
         self.path = self.node_get_member(node, str, 'path', default='')