Posted to commits@karaf.apache.org by gg...@apache.org on 2017/12/04 10:12:45 UTC

[karaf] 11/12: [KARAF-5376] Integrate features processor with Deployer and SubsystemResolver

This is an automated email from the ASF dual-hosted git repository.

ggrzybek pushed a commit to branch KARAF-5376-overrides_v2
in repository https://gitbox.apache.org/repos/asf/karaf.git

commit 61422cc5f090e62297fe69517bb6b71f43c3197e
Author: Grzegorz Grzybek <gr...@gmail.com>
AuthorDate: Sat Dec 2 20:49:40 2017 +0100

    [KARAF-5376] Integrate features processor with Deployer and SubsystemResolver
    
    After the easier part - using the features processor in the static assembly
    builder - it is now also used by FeaturesService, Deployer and
    SubsystemResolver
    
    * SubsystemResolver takes overrides and blacklist into account during bundle download and resolution
    * Bundle override is implemented in both MAVEN and OSGI modes
    * FeaturesService now uses enums for the snapshot update kind and the service requirements options
    * some SubsystemResolver methods have been pulled up to interfaces
    * overrides are no longer used by the Deployer explicitly - they're configured at the model level and checked by SubsystemResolver
    * Lots of new documentation added
    * Deployer.DeploymentState now has two maps for features - by ID and by name only (each name mapped to the list of versions of that feature)
    * Deployer.DeploymentRequest no longer has overrides field
    * tests added in SubsystemTest for overrides and blacklists
    * AssemblyDeployCallback no longer uses Blacklist by itself - it uses the features processor instead
    * More logging added to the assembly Builder (we can see what's blacklisted and what's overridden)
    * Overrides are already included in distro
---
 .../karaf/features/command/InfoFeatureCommand.java |   2 +-
 .../java/org/apache/karaf/features/BundleInfo.java |  21 +-
 .../org/apache/karaf/features/FeaturePattern.java  |   5 +
 .../org/apache/karaf/features/FeaturesService.java |  64 ++++-
 .../java/org/apache/karaf/features/Library.java    |  10 +
 .../org/apache/karaf/features/LocationPattern.java |   5 +
 .../karaf/features/internal/model/Bundle.java      |   8 +-
 .../karaf/features/internal/model/Conditional.java |   2 +
 .../karaf/features/internal/model/Features.java    |  16 +-
 .../karaf/features/internal/osgi/Activator.java    |   4 +-
 .../karaf/features/internal/region/Subsystem.java  | 242 ++++++++++++++++---
 .../internal/region/SubsystemResolveContext.java   |  12 +-
 .../internal/region/SubsystemResolver.java         |  98 ++++++--
 .../SubsystemResolverCallback.java}                |  22 +-
 .../region/SubsystemResolverResolution.java        |  82 +++++++
 .../internal/region/SubsystemResolverResult.java   |  78 ++++++
 .../internal/resolver/FeatureResource.java         |  32 ++-
 .../features/internal/resolver/ResolverUtil.java   |   5 +
 .../features/internal/resolver/ResourceUtils.java  |  11 +
 .../internal/service/BundleInstallSupport.java     |  13 +-
 .../internal/service/BundleInstallSupportImpl.java |   6 +-
 .../karaf/features/internal/service/Deployer.java  | 267 +++++++++++++++------
 .../internal/service/FeaturesProcessor.java        |   7 +
 .../internal/service/FeaturesProcessorImpl.java    |  29 ++-
 .../internal/service/FeaturesServiceConfig.java    |  33 +++
 .../internal/service/FeaturesServiceImpl.java      |  20 +-
 .../karaf/features/internal/service/Overrides.java |  32 +++
 .../features/internal/service/RepositoryImpl.java  |   1 +
 .../karaf/features/internal/service/State.java     |  19 +-
 .../karaf/features/internal/util/MapUtils.java     |  29 +++
 .../internal/region/FeaturesDependenciesTest.java  |  13 +-
 .../features/internal/region/SubsystemTest.java    | 174 +++++++++++---
 .../features/internal/service/DeployerTest.java    |  48 ++--
 .../internal/service/FeaturesProcessorTest.java    |  13 +-
 .../karaf/features/internal/region/data1/d.mf      |   6 +
 .../karaf/features/internal/region/data10/a.mf     |   5 +
 .../karaf/features/internal/region/data10/b.mf     |   5 +
 .../features/internal/region/data10/features.xml   |  26 ++
 .../karaf/features/internal/region/data3/c.mf      |   5 +
 profile/pom.xml                                    |  16 +-
 .../karaf/profile/assembly/ArtifactInstaller.java  |  16 +-
 .../profile/assembly/AssemblyDeployCallback.java   |  69 ++++--
 .../org/apache/karaf/profile/assembly/Builder.java | 114 +++++----
 .../karaf/profile/assembly/ConfigInstaller.java    |   8 +-
 .../karaf/profile/impl/ProfileBuilderImpl.java     |   2 +-
 .../apache/karaf/profile/impl/ProfilesTest.java    |  15 +-
 .../java/org/apache/karaf/tooling/VerifyMojo.java  |  54 ++---
 47 files changed, 1384 insertions(+), 380 deletions(-)

diff --git a/features/command/src/main/java/org/apache/karaf/features/command/InfoFeatureCommand.java b/features/command/src/main/java/org/apache/karaf/features/command/InfoFeatureCommand.java
index 7c6a775..a8b4606 100644
--- a/features/command/src/main/java/org/apache/karaf/features/command/InfoFeatureCommand.java
+++ b/features/command/src/main/java/org/apache/karaf/features/command/InfoFeatureCommand.java
@@ -209,7 +209,7 @@ public class InfoFeatureCommand extends FeaturesCommandSupport {
                 if(startLevel > 0) {
                     sb.append(" start-level=").append(startLevel);
                 }
-                if (featureBundle.isOverriden()) {
+                if (featureBundle.isOverriden() != BundleInfo.BundleOverrideMode.NONE) {
                     sb.append(" (overriden from " + featureBundle.getOriginalLocation() + ")");
                 }
                 System.out.println(sb.toString());
diff --git a/features/core/src/main/java/org/apache/karaf/features/BundleInfo.java b/features/core/src/main/java/org/apache/karaf/features/BundleInfo.java
index 306d8ea..7d45884 100644
--- a/features/core/src/main/java/org/apache/karaf/features/BundleInfo.java
+++ b/features/core/src/main/java/org/apache/karaf/features/BundleInfo.java
@@ -31,6 +31,25 @@ public interface BundleInfo extends Blacklisting {
 
     boolean isDependency();
 
-    boolean isOverriden();
+    BundleInfo.BundleOverrideMode isOverriden();
+
+    public enum BundleOverrideMode {
+        /**
+         * No override
+         */
+        NONE,
+
+        /**
+         * Compatibility with <code>${karaf.etc}/overrides.properties</code> - requires access to original and
+         * replacement bundle's headers to compare version and symbolic name.
+         */
+        OSGI,
+
+        /**
+         * Simpler option that's just static override - doesn't require accessing and checking the bundle/resource
+         * being overriden.
+         */
+        MAVEN
+    }
 
 }
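
A minimal sketch of how callers can consume the new tri-state override mode (mirroring the InfoFeatureCommand change above; the method name is illustrative, not Karaf API):

    import org.apache.karaf.features.BundleInfo;

    static void printOverride(BundleInfo featureBundle) {
        BundleInfo.BundleOverrideMode mode = featureBundle.isOverriden();
        if (mode != BundleInfo.BundleOverrideMode.NONE) {
            // MAVEN: the replacement was decided statically from the processed model;
            // OSGI: the replacement is kept only after comparing manifest headers
            System.out.println("overridden (" + mode + ") from " + featureBundle.getOriginalLocation());
        }
    }
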
diff --git a/features/core/src/main/java/org/apache/karaf/features/FeaturePattern.java b/features/core/src/main/java/org/apache/karaf/features/FeaturePattern.java
index a77c4df..0fbedff 100644
--- a/features/core/src/main/java/org/apache/karaf/features/FeaturePattern.java
+++ b/features/core/src/main/java/org/apache/karaf/features/FeaturePattern.java
@@ -118,4 +118,9 @@ public class FeaturePattern {
         return match;
     }
 
+    @Override
+    public String toString() {
+        return originalId;
+    }
+
 }
diff --git a/features/core/src/main/java/org/apache/karaf/features/FeaturesService.java b/features/core/src/main/java/org/apache/karaf/features/FeaturesService.java
index da8643b..e9cc072 100644
--- a/features/core/src/main/java/org/apache/karaf/features/FeaturesService.java
+++ b/features/core/src/main/java/org/apache/karaf/features/FeaturesService.java
@@ -17,10 +17,13 @@
 package org.apache.karaf.features;
 
 import java.net.URI;
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Map;
 import java.util.Set;
 
+import org.osgi.namespace.service.ServiceNamespace;
+
 /**
  * The service managing features repositories.
  */
@@ -28,20 +31,13 @@ public interface FeaturesService {
 
     String ROOT_REGION = "root";
 
-    String UPDATE_SNAPSHOTS_NONE = "none";
-    String UPDATE_SNAPSHOTS_CRC = "crc";
-    String DEFAULT_UPDATE_SNAPSHOTS = UPDATE_SNAPSHOTS_CRC;
-    String UPDATE_SNAPSHOTS_ALWAYS = "always";
+    SnapshotUpdateBehavior DEFAULT_UPDATE_SNAPSHOTS = SnapshotUpdateBehavior.Crc;
 
     String DEFAULT_FEATURE_RESOLUTION_RANGE = "${range;[====,====]}";
     String DEFAULT_BUNDLE_UPDATE_RANGE = "${range;[==,=+)}";
 
     String UPDATEABLE_URIS = "mvn:.*SNAPSHOT|(?!mvn:).*";
 
-    String SERVICE_REQUIREMENTS_DISABLE = "disable";
-    String SERVICE_REQUIREMENTS_DEFAULT = "default";
-    String SERVICE_REQUIREMENTS_ENFORCE = "enforce";
-
     int DEFAULT_DOWNLOAD_THREADS = 8;
     long DEFAULT_SCHEDULE_DELAY = 250;
     int DEFAULT_SCHEDULE_MAX_RUN = 9;
@@ -65,6 +61,58 @@ public interface FeaturesService {
     }
 
     /**
+     * Configuration options for handling requirements from {@link ServiceNamespace#SERVICE_NAMESPACE} namespace
+     */
+    enum ServiceRequirementsBehavior {
+        /** Remove and do not consider any {@link ServiceNamespace#SERVICE_NAMESPACE} requirements */
+        Disable("disable"),
+        /** Consider {@link ServiceNamespace#SERVICE_NAMESPACE} requirements only for <code>http://karaf.apache.org/xmlns/features/v1.2.1</code> XSD and below */
+        Default("default"),
+        /** Always consider {@link ServiceNamespace#SERVICE_NAMESPACE} requirements */
+        Enforce("enforce");
+
+        private String value;
+
+        ServiceRequirementsBehavior(String value) {
+            this.value = value;
+        }
+
+        public String getValue() {
+            return value;
+        }
+
+        public static ServiceRequirementsBehavior fromString(String serviceRequirements) {
+            return Arrays.stream(values()).filter(sub -> sub.value.equalsIgnoreCase(serviceRequirements)).findFirst().orElse(Default);
+        }
+    }
+
+    /**
+     * Configuration options for checking whether an updateable bundle should really be updated
+     */
+    enum SnapshotUpdateBehavior {
+        /** Never update */
+        None("none"),
+        /** Update if CRC differs */
+        Crc("crc"),
+        /** Always update */
+        Always("always");
+
+        private String value;
+
+        SnapshotUpdateBehavior(String value) {
+            this.value = value;
+        }
+
+        public String getValue() {
+            return value;
+        }
+
+        public static SnapshotUpdateBehavior fromString(String updateSnapshots) {
+            return Arrays.stream(values()).filter(sub -> sub.value.equals(updateSnapshots)).findFirst().orElse(Crc);
+        }
+    }
+
+    /**
      * Validate repository contents.
      *
      * @param uri Repository uri.
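
A hedged sketch of the string-to-enum round trip these enums provide; unknown values fall back to the previous defaults (Crc and Default), which keeps the string-based configuration shown in the Activator change below working unchanged:

    FeaturesService.SnapshotUpdateBehavior snapshots =
            FeaturesService.SnapshotUpdateBehavior.fromString("crc");          // -> Crc
    FeaturesService.ServiceRequirementsBehavior services =
            FeaturesService.ServiceRequirementsBehavior.fromString("enforce"); // -> Enforce
    String persisted = snapshots.getValue();                                   // -> "crc"
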
diff --git a/features/core/src/main/java/org/apache/karaf/features/Library.java b/features/core/src/main/java/org/apache/karaf/features/Library.java
index 7ed2147..f49d938 100644
--- a/features/core/src/main/java/org/apache/karaf/features/Library.java
+++ b/features/core/src/main/java/org/apache/karaf/features/Library.java
@@ -27,8 +27,18 @@ public interface Library {
 
     String getType();
 
+    /**
+     * Whether given library's exported packages should be added to <code>org.osgi.framework.system.packages.extra</code>
+     * property in <code>${karaf.etc}/config.properties</code>.
+     * @return
+     */
     boolean isExport();
 
+    /**
+     * Whether given library's exported packages should be added to <code>org.osgi.framework.bootdelegation</code>
+     * property in <code>${karaf.etc}/config.properties</code>
+     * @return
+     */
     boolean isDelegate();
 
 }
diff --git a/features/core/src/main/java/org/apache/karaf/features/LocationPattern.java b/features/core/src/main/java/org/apache/karaf/features/LocationPattern.java
index e5c96eb..cf6f01d 100644
--- a/features/core/src/main/java/org/apache/karaf/features/LocationPattern.java
+++ b/features/core/src/main/java/org/apache/karaf/features/LocationPattern.java
@@ -204,4 +204,9 @@ public class LocationPattern {
         return match;
     }
 
+    @Override
+    public String toString() {
+        return originalUri;
+    }
+
 }
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/model/Bundle.java b/features/core/src/main/java/org/apache/karaf/features/internal/model/Bundle.java
index d0ba74f..d028eb8 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/model/Bundle.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/model/Bundle.java
@@ -66,7 +66,7 @@ public class Bundle implements BundleInfo {
     @XmlTransient
     private boolean blacklisted = false;
     @XmlTransient
-    private boolean overriden = false;
+    private BundleInfo.BundleOverrideMode overriden = BundleInfo.BundleOverrideMode.NONE;
 
     public Bundle() {
     }
@@ -164,13 +164,13 @@ public class Bundle implements BundleInfo {
         this.blacklisted = blacklisted;
     }
 
-    public boolean isOverriden() {
+    @Override
+    public BundleInfo.BundleOverrideMode isOverriden() {
         return overriden;
     }
 
-    public void setOverriden(boolean overriden) {
+    public void setOverriden(BundleInfo.BundleOverrideMode overriden) {
         this.overriden = overriden;
-
     }
 
     @Override
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/model/Conditional.java b/features/core/src/main/java/org/apache/karaf/features/internal/model/Conditional.java
index 97e3001..d9fc5ce 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/model/Conditional.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/model/Conditional.java
@@ -37,6 +37,8 @@ import org.apache.karaf.features.Feature;
         })
 public class Conditional extends Content implements org.apache.karaf.features.Conditional {
 
+    // TODO: use type that really reflects <xs:element name="condition" type="tns:dependency" /> ?
+    // i.e., org.apache.karaf.features.internal.model.Dependency
     @XmlElement(name = "condition", namespace=org.apache.karaf.features.FeaturesNamespaces.URI_CURRENT)
     protected List<String> condition;
 
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/model/Features.java b/features/core/src/main/java/org/apache/karaf/features/internal/model/Features.java
index af3aace..180ee1e 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/model/Features.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/model/Features.java
@@ -29,6 +29,8 @@ import javax.xml.bind.annotation.XmlSchemaType;
 import javax.xml.bind.annotation.XmlTransient;
 import javax.xml.bind.annotation.XmlType;
 
+import org.apache.karaf.features.Blacklisting;
+
 /**
  * <p>Root element of Feature definition. It contains optional attribute which allow
  * name of repository. This name will be used in shell to display source repository
@@ -53,7 +55,7 @@ import javax.xml.bind.annotation.XmlType;
 @XmlRootElement(name = "features", namespace=org.apache.karaf.features.FeaturesNamespaces.URI_CURRENT)
 @XmlAccessorType(XmlAccessType.FIELD)
 @XmlType(name = "features", propOrder = {"repository", "resourceRepository", "feature"})
-public class Features {
+public class Features implements Blacklisting {
 
     @XmlSchemaType(name = "anyURI")
     @XmlElement(name = "repository", namespace=org.apache.karaf.features.FeaturesNamespaces.URI_CURRENT)
@@ -67,6 +69,8 @@ public class Features {
     protected String name;
     @XmlTransient
     private String namespace;
+    @XmlTransient
+    private boolean blacklisted;
 
     /**
      * <p>Get the value of the repository property.</p>
@@ -196,4 +200,14 @@ public class Features {
     public String getNamespace() {
         return namespace;
     }
+
+    @Override
+    public boolean isBlacklisted() {
+        return blacklisted;
+    }
+
+    public void setBlacklisted(boolean blacklisted) {
+        this.blacklisted = blacklisted;
+    }
+
 }
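
With the root Features element implementing Blacklisting, a processor can mark a whole repository as blacklisted instead of removing it from the model; a short sketch of that convention (the processor step is hypothetical):

    Features featuresModel = ...;        // JAXB model read from a repository
    featuresModel.setBlacklisted(true);  // e.g. done by a features processor for a blacklisted repository URI
    if (featuresModel.isBlacklisted()) {
        // downstream code keeps the entry in the model but skips deploying anything from it
    }
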
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/osgi/Activator.java b/features/core/src/main/java/org/apache/karaf/features/internal/osgi/Activator.java
index ad6d940..a950f90 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/osgi/Activator.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/osgi/Activator.java
@@ -232,13 +232,13 @@ public class Activator extends BaseActivator {
             getString("overrides", new File(karafEtc, "overrides.properties").toURI().toString()),
             getString("featureResolutionRange", FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE),
             getString("bundleUpdateRange", FeaturesService.DEFAULT_BUNDLE_UPDATE_RANGE),
-            getString("updateSnapshots", FeaturesService.DEFAULT_UPDATE_SNAPSHOTS),
+            getString("updateSnapshots", FeaturesService.DEFAULT_UPDATE_SNAPSHOTS.getValue()),
             getInt("downloadThreads", FeaturesService.DEFAULT_DOWNLOAD_THREADS),
             getLong("scheduleDelay", FeaturesService.DEFAULT_SCHEDULE_DELAY),
             getInt("scheduleMaxRun", FeaturesService.DEFAULT_SCHEDULE_MAX_RUN),
             getString("blacklisted", new File(karafEtc, "blacklisted.properties").toURI().toString()),
             getString("featureProcessing", new File(karafEtc, FEATURES_SERVICE_PROCESSING_FILE).toURI().toString()),
-            getString("serviceRequirements", FeaturesService.SERVICE_REQUIREMENTS_DEFAULT));
+            getString("serviceRequirements", FeaturesService.ServiceRequirementsBehavior.Default.getValue()));
     }
 
     private StateStorage createStateStorage() {
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/region/Subsystem.java b/features/core/src/main/java/org/apache/karaf/features/internal/region/Subsystem.java
index cfd295e..523e838 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/region/Subsystem.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/region/Subsystem.java
@@ -22,6 +22,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -76,6 +77,9 @@ import static org.osgi.framework.namespace.IdentityNamespace.CAPABILITY_VERSION_
 import static org.osgi.framework.namespace.IdentityNamespace.IDENTITY_NAMESPACE;
 import static org.osgi.resource.Namespace.REQUIREMENT_FILTER_DIRECTIVE;
 
+/**
+ * A {@link Resource} representing ...
+ */
 public class Subsystem extends ResourceImpl {
 
     private static final String ALL_FILTER = "(|(!(all=*))(all=*))";
@@ -98,20 +102,49 @@ public class Subsystem extends ResourceImpl {
                     IDENTITY_NAMESPACE,
                     Collections.singleton(SUBSYSTEM_FILTER));
 
+    // name of the subsystem: region or region#feature[-version]
     private final String name;
+    // works only with feature scoping. region subsystems by default accept deps
     private final boolean acceptDependencies;
+    // parent Subsystem for child subsystems representing child regions or regions' features
     private final Subsystem parent;
+    // feature for Subsystem representing a feature
     private final Feature feature;
+
     private final boolean mandatory;
+
     private final List<Subsystem> children = new ArrayList<>();
+
+    // a set of filters applied when child subsystem needs capabilities from parent subsystem
     private final Map<String, Set<String>> importPolicy;
+    // a set of filters applied when parent subsystem needs capabilities from child subsystem
     private final Map<String, Set<String>> exportPolicy;
+
+    // contains subsystems representing features of this region, child subsystems for child regions, system resources(?),
+    // bundle resources added explicitly as reqs for this Subsystem, feature resources for subsystems representing
+    // features, ...
     private final List<Resource> installable = new ArrayList<>();
+
+    // mapping from "symbolic-name|version" to a DependencyInfo wrapping a Resource
+    // <bundle dependency="false"> are collected directly in feature's subsystem
+    // <bundle dependency="true"> are collected in first parent subsystem of feature or in subsystem of scoped feature
     private final Map<String, DependencyInfo> dependencies = new HashMap<>();
+    // non-mandatory dependant features (<feature>/<feature>) collected from current and child subsystems representing
+    // features (unless some subsystem for feature has <scoping acceptDependencies="true">)
     private final List<Requirement> dependentFeatures = new ArrayList<>();
 
+    // direct bundle URI dependencies - not added by FeaturesService, but used in startup stage of assembly builder
+    // these bundles will be downloaded
     private final List<String> bundles = new ArrayList<>();
 
+    /**
+     * <p>Constructs root subsystem {@link Resource} for {@link FeaturesService#ROOT_REGION} that imports/exports only
+     * caps/reqs with <code>(type=karaf.subsystem)</code></p>
+     * <p>Root subsystem by default accepts dependencies - will gather dependant features of child feature subsystems,
+     * effectively _flattening_ the set of features within single region's subsystem.</p>
+     *
+     * @param name
+     */
     public Subsystem(String name) {
         super(name, TYPE_SUBSYSTEM, Version.emptyVersion);
         this.name = name;
@@ -123,6 +156,16 @@ public class Subsystem extends ResourceImpl {
         this.mandatory = true;
     }
 
+    /**
+     * <p>Constructs subsystem for a feature that either imports/exports all caps or (see {@link Feature#getScoping()})
+     * has configurable import/export policy + <code>(|(type=karaf.subsystem)(type=karaf.feature))</code> filter in
+     * {@link org.osgi.framework.namespace.IdentityNamespace#IDENTITY_NAMESPACE}</p>
+     * <p>Such subsystem requires <code>type=karaf.feature; osgi.identity=feature-name[; version=feature-version]</code></p>
+     * @param name
+     * @param feature
+     * @param parent
+     * @param mandatory
+     */
     public Subsystem(String name, Feature feature, Subsystem parent, boolean mandatory) {
         super(name, TYPE_SUBSYSTEM, Version.emptyVersion);
         this.name = name;
@@ -146,6 +189,14 @@ public class Subsystem extends ResourceImpl {
                 new VersionRange(VersionTable.getVersion(feature.getVersion()), true));
     }
 
+    /**
+     * <p>Constructs child subsystem {@link Resource} for {@link FeaturesService#ROOT_REGION}'s child
+     * that imports all caps and exports only caps with <code>(type=karaf.subsystem)</code></p>
+     * @param name
+     * @param parent
+     * @param acceptDependencies
+     * @param mandatory
+     */
     public Subsystem(String name, Subsystem parent, boolean acceptDependencies, boolean mandatory) {
         super(name, TYPE_SUBSYSTEM, Version.emptyVersion);
         this.name = name;
@@ -198,6 +249,13 @@ public class Subsystem extends ResourceImpl {
         return feature;
     }
 
+    /**
+     * Create child subsystem for this subsystem. Child will become parent's mandatory requirement to force its resolution.
+     *
+     * @param name
+     * @param acceptDependencies
+     * @return
+     */
     public Subsystem createSubsystem(String name, boolean acceptDependencies) {
         if (feature != null) {
             throw new UnsupportedOperationException("Can not create application subsystems inside a feature subsystem");
@@ -269,15 +327,23 @@ public class Subsystem extends ResourceImpl {
     }
 
     @SuppressWarnings("InfiniteLoopStatement")
-    public void build(Collection<Feature> features) throws Exception {
-        doBuild(features, true);
+    public void build(Map<String, List<Feature>> allFeatures) throws Exception {
+        doBuild(allFeatures, true);
     }
 
-    private void doBuild(Collection<Feature> features, boolean mandatory) throws Exception {
+    /**
+     *
+     * @param allFeatures
+     * @param mandatory
+     * @throws Exception
+     */
+    private void doBuild(Map<String, List<Feature>> allFeatures, boolean mandatory) throws Exception {
         for (Subsystem child : children) {
-            child.doBuild(features, true);
+            child.doBuild(allFeatures, true);
         }
         if (feature != null) {
+            // each dependant feature becomes a non-mandatory (why?) requirement of first parent that
+            // accepts dependencies
             for (Dependency dep : feature.getDependencies()) {
                 Subsystem ss = this;
                 while (!ss.isAcceptDependencies()) {
@@ -285,13 +351,14 @@ public class Subsystem extends ResourceImpl {
                 }
                 ss.requireFeature(dep.getName(), dep.getVersion(), false);
             }
+            // each conditional feature becomes a child subsystem of this feature's subsystem
             for (Conditional cond : feature.getConditional()) {
                 Feature fcond = cond.asFeature();
                 String ssName = this.name + "#" + (fcond.hasVersion() ? fcond.getName() + "-" + fcond.getVersion() : fcond.getName());
                 Subsystem fs = getChild(ssName);
                 if (fs == null) {
                     fs = new Subsystem(ssName, fcond, this, true);
-                    fs.doBuild(features, false);
+                    fs.doBuild(allFeatures, false);
                     installable.add(fs);
                     children.add(fs);
                 }
@@ -305,20 +372,21 @@ public class Subsystem extends ResourceImpl {
             if (requirements.isEmpty()) {
                 break;
             }
+            // for each feature requirement on this subsystem (osgi.identity;type=karaf.feature), we create a
+            // Subsystem representing mandatory feature.
             for (Requirement requirement : requirements) {
                 String name = (String) requirement.getAttributes().get(IDENTITY_NAMESPACE);
                 String type = (String) requirement.getAttributes().get(CAPABILITY_TYPE_ATTRIBUTE);
                 VersionRange range = (VersionRange) requirement.getAttributes().get(CAPABILITY_VERSION_ATTRIBUTE);
-                if (TYPE_FEATURE.equals(type)) {
-                    for (Feature feature : features) {
-                        if (feature.getName().equals(name)
-                                && (range == null || range.contains(VersionTable.getVersion(feature.getVersion())))) {
+                if (TYPE_FEATURE.equals(type) && allFeatures.containsKey(name)) {
+                    for (Feature feature : allFeatures.get(name)) {
+                        if (range == null || range.contains(VersionTable.getVersion(feature.getVersion()))) {
                             if (feature != this.feature) {
                                 String ssName = this.name + "#" + (feature.hasVersion() ? feature.getName() + "-" + feature.getVersion() : feature.getName());
                                 Subsystem fs = getChild(ssName);
                                 if (fs == null) {
                                     fs = new Subsystem(ssName, feature, this, mandatory && !SubsystemResolveContext.isOptional(requirement));
-                                    fs.build(features);
+                                    fs.build(allFeatures);
                                     installable.add(fs);
                                     children.add(fs);
                                 }
@@ -347,10 +415,12 @@ public class Subsystem extends ResourceImpl {
                 String[] p = prereq.split("/");
                 if (feature.getName().equals(p[0])
                         && VersionRange.parseVersionRange(p[1]).contains(Version.parseVersion(feature.getVersion()))) {
+                    // our feature is already among prerequisites, so ...
                     match = true;
                     break;
                 }
             }
+            // ... we won't be adding its prerequisites - they'll be handled after another PartialDeploymentException
             if (!match) {
                 for (Dependency dep : feature.getDependencies()) {
                     if (dep.isPrerequisite()) {
@@ -361,21 +431,32 @@ public class Subsystem extends ResourceImpl {
         }
     }
 
+    /**
+     * Downloads bundles for all the features in current and child subsystems. But also collects bundles
+     * as {@link DependencyInfo}.
+     * @param manager
+     * @param featureResolutionRange
+     * @param serviceRequirements
+     * @param repos
+     * @throws Exception
+     */
     @SuppressWarnings("InfiniteLoopStatement")
     public void downloadBundles(DownloadManager manager,
-                                Set<String> overrides,
                                 String featureResolutionRange,
-                                final String serviceRequirements,
-                                RepositoryManager repos) throws Exception {
+                                final FeaturesService.ServiceRequirementsBehavior serviceRequirements,
+                                RepositoryManager repos,
+                                SubsystemResolverCallback callback) throws Exception {
         for (Subsystem child : children) {
-            child.downloadBundles(manager, overrides, featureResolutionRange, serviceRequirements, repos);
+            child.downloadBundles(manager, featureResolutionRange, serviceRequirements, repos, callback);
         }
-        final Map<String, ResourceImpl> bundles = new ConcurrentHashMap<>();
-        final Downloader downloader = manager.createDownloader();
+
+        // collect BundleInfos for given feature - both direct <feature>/<bundle>s and <feature>/<conditional>/<bundle>s
         final Map<BundleInfo, Conditional> infos = new HashMap<>();
+        final Downloader downloader = manager.createDownloader();
         if (feature != null) {
             for (Conditional cond : feature.getConditional()) {
                 for (final BundleInfo bi : cond.getBundles()) {
+                    // bundles from conditional features will be added as non-mandatory requirements
                     infos.put(bi, cond);
                 }
             }
@@ -383,36 +464,62 @@ public class Subsystem extends ResourceImpl {
                 infos.put(bi, null);
             }
         }
-        boolean removeServiceRequirements;
-        if (FeaturesService.SERVICE_REQUIREMENTS_DISABLE.equals(serviceRequirements)) {
-            removeServiceRequirements = true;
-        } else if (feature != null && FeaturesService.SERVICE_REQUIREMENTS_DEFAULT.equals(serviceRequirements)) {
-            removeServiceRequirements = FeaturesNamespaces.URI_1_0_0.equals(feature.getNamespace())
-                                     || FeaturesNamespaces.URI_1_1_0.equals(feature.getNamespace())
-                                     || FeaturesNamespaces.URI_1_2_0.equals(feature.getNamespace())
-                                     || FeaturesNamespaces.URI_1_2_1.equals(feature.getNamespace());
-        } else {
-            removeServiceRequirements = false;
+
+        // the features model doesn't have blacklisted entries removed, only marked as blacklisted - so we simply
+        // don't download them
+        //infos.keySet().removeIf(Blacklisting::isBlacklisted);
+        for (Iterator<BundleInfo> iterator = infos.keySet().iterator(); iterator.hasNext(); ) {
+            BundleInfo bi = iterator.next();
+            if (bi.isBlacklisted()) {
+                iterator.remove();
+                if (callback != null) {
+                    callback.bundleBlacklisted(bi);
+                }
+            }
         }
+
+        // all downloaded bundles
+        final Map<String, ResourceImpl> bundles = new ConcurrentHashMap<>();
+        // resources for locations that were overriden in OSGi mode - to check whether the override should actually
+        // take place, by checking resource's headers
+        final Map<String, ResourceImpl> overrides = new ConcurrentHashMap<>();
+
+        boolean removeServiceRequirements = serviceRequirementsBehavior(feature, serviceRequirements);
+
+        // download collected BundleInfo locations
         for (Map.Entry<BundleInfo, Conditional> entry : infos.entrySet()) {
             final BundleInfo bi = entry.getKey();
             final String loc = bi.getLocation();
             downloader.download(loc, provider -> {
-                bundles.put(loc, createResource(loc, getMetadata(provider), removeServiceRequirements));
+                // always download location (could be overriden)
+                ResourceImpl resource = createResource(loc, getMetadata(provider), removeServiceRequirements);
+                bundles.put(loc, resource);
+
+                if (bi.isOverriden() == BundleInfo.BundleOverrideMode.OSGI) {
+                    // also download original from original bundle URI to check if we should override by comparing
+                    // symbolic name - requires MANIFEST.MF header access. If there should be no override, we'll get
+                    // back to original URI
+                    downloader.download(bi.getOriginalLocation(), provider2 -> {
+                        ResourceImpl originalResource = createResource(bi.getOriginalLocation(),
+                                getMetadata(provider2), removeServiceRequirements);
+                        bundles.put(bi.getOriginalLocation(), originalResource);
+                        // an entry in overrides map means that given location was overriden
+                        overrides.put(loc, originalResource);
+                    });
+                }
             });
         }
+        // download direct bundle: requirements - without consulting overrides
         for (Clause bundle : Parser.parseClauses(this.bundles.toArray(new String[this.bundles.size()]))) {
             final String loc = bundle.getName();
             downloader.download(loc, provider -> {
                 bundles.put(loc, createResource(loc, getMetadata(provider), removeServiceRequirements));
             });
         }
-        for (String override : overrides) {
-            final String loc = Overrides.extractUrl(override);
-            downloader.download(loc, provider -> {
-                bundles.put(loc, createResource(loc, getMetadata(provider), removeServiceRequirements));
-            });
-        }
+        // we *don't* have to download overrides separately - they're already taken into account from processed model
+
+        // download additional libraries - only exported ones, so their capabilities are taken into account during
+        // the resolution process
         if (feature != null) {
             for (Library library : feature.getLibraries()) {
                 if (library.isExport()) {
@@ -424,18 +531,25 @@ public class Subsystem extends ResourceImpl {
             }
         }
         downloader.await();
+
+        // the opposite of what we had before: bundles are already overridden at the model level, but now that
+        // we finally have access to headers we can compare symbolic names and, if the override mode is OSGI,
+        // restore the original resource when the override should not apply.
         Overrides.override(bundles, overrides);
+
         if (feature != null) {
             // Add conditionals
             Map<Conditional, Resource> resConds = new HashMap<>();
             for (Conditional cond : feature.getConditional()) {
                 FeatureResource resCond = FeatureResource.build(feature, cond, featureResolutionRange, bundles);
+                // feature's subsystem will optionally require conditional feature resource
                 addIdentityRequirement(this, resCond, false);
+                // but it's a mandatory requirement the other way around
                 addIdentityRequirement(resCond, this, true);
                 installable.add(resCond);
                 resConds.put(cond, resCond);
             }
-            // Add features
+            // Add features and make it require given subsystem that represents logical feature requirement
             FeatureResource resFeature = FeatureResource.build(feature, featureResolutionRange, bundles);
             addIdentityRequirement(resFeature, this);
             installable.add(resFeature);
@@ -447,6 +561,7 @@ public class Subsystem extends ResourceImpl {
                 ResourceImpl res = bundles.get(loc);
                 int sl = bi.getStartLevel() <= 0 ? feature.getStartLevel() : bi.getStartLevel();
                 if (cond != null) {
+                    // bundle of conditional feature will have mandatory requirement on it
                     addIdentityRequirement(res, resConds.get(cond), true);
                 }
                 boolean mandatory = !bi.isDependency() && cond == null;
@@ -486,16 +601,39 @@ public class Subsystem extends ResourceImpl {
                 addDependency(bundles.get(loc), false, start, startLevel, blacklisted);
             } else {
                 doAddDependency(bundles.get(loc), true, start, startLevel, blacklisted);
+                // non dependency bundle will be added as osgi.identity req on type=osgi.bundle
                 addIdentityRequirement(this, bundles.get(loc));
             }
         }
         // Compute dependencies
         for (DependencyInfo info : dependencies.values()) {
             installable.add(info.resource);
+            // bundle resource will have a requirement on its feature's subsystem too
+            // when bundle is declared with dependency="true", it will have a requirement on its region's subsystem
             addIdentityRequirement(info.resource, this, info.mandatory);
         }
     }
 
+    /**
+     * How to handle requirements from {@link org.osgi.namespace.service.ServiceNamespace#SERVICE_NAMESPACE} for
+     * given feature.
+     * @param feature
+     * @param serviceRequirements
+     * @return
+     */
+    private boolean serviceRequirementsBehavior(Feature feature, FeaturesService.ServiceRequirementsBehavior serviceRequirements) {
+        if (FeaturesService.ServiceRequirementsBehavior.Disable == serviceRequirements) {
+            return true;
+        } else if (feature != null && FeaturesService.ServiceRequirementsBehavior.Default == serviceRequirements) {
+            return FeaturesNamespaces.URI_1_0_0.equals(feature.getNamespace())
+                    || FeaturesNamespaces.URI_1_1_0.equals(feature.getNamespace())
+                    || FeaturesNamespaces.URI_1_2_0.equals(feature.getNamespace())
+                    || FeaturesNamespaces.URI_1_2_1.equals(feature.getNamespace());
+        } else {
+            return false;
+        }
+    }
+
     ResourceImpl cloneResource(Resource resource) {
         ResourceImpl res = new ResourceImpl();
         for (Capability cap : resource.getCapabilities(null)) {
@@ -536,6 +674,16 @@ public class Subsystem extends ResourceImpl {
         throw new IllegalArgumentException("Resource " + provider.getUrl() + " does not contain a manifest");
     }
 
+    /**
+     * Adds a {@link Resource} as dependency if this subsystem {@link Subsystem#isAcceptDependencies() accepts dependencies},
+     * otherwise, the dependency is added to parent subsystem, effectively searching for first parent subsystem representing
+     * region or scoped feature.
+     * @param resource
+     * @param mandatory
+     * @param start
+     * @param startLevel
+     * @param blacklisted
+     */
     void addDependency(ResourceImpl resource, boolean mandatory, boolean start, int startLevel, boolean blacklisted) {
         if (isAcceptDependencies()) {
             doAddDependency(resource, mandatory, start, startLevel, blacklisted);
@@ -544,12 +692,27 @@ public class Subsystem extends ResourceImpl {
         }
     }
 
+    /**
+     * Adds a {@link Resource} to this subsystem
+     * @param resource
+     * @param mandatory
+     * @param start
+     * @param startLevel
+     * @param blacklisted
+     */
     private void doAddDependency(ResourceImpl resource, boolean mandatory, boolean start, int startLevel, boolean blacklisted) {
         String id = ResolverUtil.getSymbolicName(resource) + "|" + ResolverUtil.getVersion(resource);
         DependencyInfo info = new DependencyInfo(resource, mandatory, start, startLevel, blacklisted);
         dependencies.merge(id, info, this::merge);
     }
 
+    /**
+     * Merges two dependencies by taking lower start level, stronger <code>mandatory</code> option and stronger
+     * <code>start</code> option.
+     * @param di1
+     * @param di2
+     * @return
+     */
     private DependencyInfo merge(DependencyInfo di1, DependencyInfo di2) {
         DependencyInfo info = new DependencyInfo();
         if (di1.resource != di2.resource) {
@@ -561,7 +724,7 @@ public class Subsystem extends ResourceImpl {
                 info.resource = di2.resource;
             } else {
                 String id = ResolverUtil.getSymbolicName(di1.resource) + "/" + ResolverUtil.getVersion(di1.resource);
-                throw new IllegalStateException("Resource " + id + " is duplicated on subsystem " + this.toString() + ". First resource requires " + (r1 != null ? r1 : "nothing") + " while the second requires " + (r2 != null ? r2 : "nothing"));
+                throw new IllegalStateException("Resource " + id + " is duplicated on subsystem " + this.toString() + ". First resource requires " + r1 + " while the second requires " + r2);
             }
         } else {
             info.resource = di1.resource;
@@ -585,13 +748,16 @@ public class Subsystem extends ResourceImpl {
         return null;
     }
 
+    /**
+     * TODOCUMENT: More generic than just {@link BundleInfo}
+     */
     class DependencyInfo implements BundleInfo {
         ResourceImpl resource;
         boolean mandatory;
         boolean start;
         int startLevel;
         boolean blacklisted;
-        boolean overriden;
+        BundleInfo.BundleOverrideMode overriden;
 
         public DependencyInfo() {
         }
@@ -636,11 +802,11 @@ public class Subsystem extends ResourceImpl {
         }
 
         @Override
-        public boolean isOverriden() {
+        public BundleInfo.BundleOverrideMode isOverriden() {
             return overriden;
         }
 
-        public void setOverriden(boolean overriden) {
+        public void setOverriden(BundleInfo.BundleOverrideMode overriden) {
             this.overriden = overriden;
         }
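
The OSGI override mode used above needs both the original and the replacement manifests before committing to the swap. A hedged sketch of that decision, assuming (per the comments in this diff) that it boils down to comparing symbolic names; the helper name is illustrative, the real check lives in Overrides.override():

    // returns the resource that should actually be used for the overridden location
    static ResourceImpl chooseForOsgiOverride(ResourceImpl original, ResourceImpl replacement) {
        String originalSn = ResolverUtil.getSymbolicName(original);
        String replacementSn = ResolverUtil.getSymbolicName(replacement);
        // same symbolic name -> accept the replacement; otherwise restore the original resource
        return originalSn != null && originalSn.equals(replacementSn) ? replacement : original;
    }
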
 
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolveContext.java b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolveContext.java
index c162b81..6bb1166 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolveContext.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolveContext.java
@@ -38,7 +38,6 @@ import org.apache.karaf.features.internal.resolver.ResourceImpl;
 import org.eclipse.equinox.region.Region;
 import org.eclipse.equinox.region.RegionDigraph;
 import org.eclipse.equinox.region.RegionFilter;
-import org.osgi.framework.BundleException;
 import org.osgi.framework.namespace.PackageNamespace;
 import org.osgi.framework.wiring.BundleRevision;
 import org.osgi.namespace.service.ServiceNamespace;
@@ -72,9 +71,9 @@ public class SubsystemResolveContext extends ResolveContext {
     private final Repository repository;
     private final Repository globalRepository;
     private final Downloader downloader;
-    private final String serviceRequirements;
+    private final FeaturesService.ServiceRequirementsBehavior serviceRequirements;
 
-    public SubsystemResolveContext(Subsystem root, RegionDigraph digraph, Repository globalRepository, Downloader downloader, String serviceRequirements) throws BundleException {
+    public SubsystemResolveContext(Subsystem root, RegionDigraph digraph, Repository globalRepository, Downloader downloader, FeaturesService.ServiceRequirementsBehavior serviceRequirements) {
         this.root = root;
         this.globalRepository = globalRepository != null ? new SubsystemRepository(globalRepository) : null;
         this.downloader = downloader;
@@ -159,6 +158,11 @@ public class SubsystemResolveContext extends ResolveContext {
         return PackageNamespace.RESOLUTION_DYNAMIC.equals(resolution);
     }
 
+    /**
+     * {@link #resToSub} will quickly map all {@link Subsystem#getInstallable() installable resources} to their
+     * {@link Subsystem}
+     * @param subsystem
+     */
     void prepare(Subsystem subsystem) {
         resToSub.put(subsystem, subsystem);
         for (Resource res : subsystem.getInstallable()) {
@@ -275,7 +279,7 @@ public class SubsystemResolveContext extends ResolveContext {
     @Override
     public boolean isEffective(Requirement requirement) {
         boolean isServiceReq = ServiceNamespace.SERVICE_NAMESPACE.equals(requirement.getNamespace());
-        return !(isServiceReq && FeaturesService.SERVICE_REQUIREMENTS_DISABLE.equals(serviceRequirements));
+        return !(isServiceReq && FeaturesService.ServiceRequirementsBehavior.Disable == serviceRequirements);
     }
 
     @Override
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolver.java b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolver.java
index d9ec573..87de2b8 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolver.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolver.java
@@ -22,10 +22,12 @@ import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.util.*;
+import java.util.stream.Collectors;
 
 import org.apache.felix.utils.collections.DictionaryAsMap;
 import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.Feature;
+import org.apache.karaf.features.FeaturesService;
 import org.apache.karaf.features.internal.download.DownloadManager;
 import org.apache.karaf.features.internal.download.Downloader;
 import org.apache.karaf.features.internal.download.StreamProvider;
@@ -66,7 +68,7 @@ import static org.osgi.framework.namespace.IdentityNamespace.IDENTITY_NAMESPACE;
 import static org.osgi.framework.namespace.IdentityNamespace.TYPE_BUNDLE;
 import static org.osgi.framework.namespace.IdentityNamespace.TYPE_FRAGMENT;
 
-public class SubsystemResolver {
+public class SubsystemResolver implements SubsystemResolverResolution, SubsystemResolverResult {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(SubsystemResolver.class);
 
@@ -86,17 +88,30 @@ public class SubsystemResolver {
     private RegionDigraph flatDigraph;
     private Map<String, Map<String, BundleInfo>> bundleInfos;
 
+    private SubsystemResolverCallback callback;
+
     public SubsystemResolver(Resolver resolver, DownloadManager manager) {
         this.resolver = resolver;
         this.manager = manager;
     }
 
+    public void setDeployCallback(SubsystemResolverCallback callback) {
+        this.callback = callback;
+    }
+
+    @Override
     public void prepare(
-            Collection<Feature> allFeatures,
+            Map<String, List<Feature>> allFeatures,
             Map<String, Set<String>> requirements,
             Map<String, Set<BundleRevision>> system
     ) throws Exception {
-        // Build subsystems on the fly
+        // #1. Build subsystems on the fly
+        //  - regions use hierarchical names with root region called "root" and child regions named "root/child",
+        //    "root/child/grandchild", etc.
+        //  - there can be only one root region and even though Equinox Regions can be configured as a digraph,
+        //    only a tree structure is used
+        //  - each region will have corresponding Subsystem created and (being an OSGi Resource), will _require_
+        //    related requirements. Each region's subsystem will also _require_ all child subsystems
         for (Map.Entry<String, Set<String>> entry : requirements.entrySet()) {
             String[] parts = entry.getKey().split("/");
             if (root == null) {
@@ -106,9 +121,14 @@ public class SubsystemResolver {
             }
             Subsystem ss = root;
             for (int i = 1; i < parts.length; i++) {
-                ss = getOrCreateChild(ss, parts[i]);
+                String childName = Arrays.stream(Arrays.copyOfRange(parts, 0, i + 1)).collect(Collectors.joining("/"));
+                ss = getOrCreateChild(ss, childName, parts[i]);
             }
             for (String requirement : entry.getValue()) {
+                // #1a. each "[feature:]*" and "requirement:*" requirements are added directly as resource requirements:
+                //  - feature: ns=osgi.identity, 'osgi.identity=f1; type=karaf.feature; filter:="(&(osgi.identity=f1)(type=karaf.feature))"'
+                //  - requirement: as-is
+                //  - bundle: added only as downloadable bundle - used only by assembly builder
                 ss.require(requirement);
             }
         }
@@ -116,10 +136,18 @@ public class SubsystemResolver {
             return;
         }
 
-        // Pre-resolve
+        // #2. Pre-resolve
+        //  - for each region's subsystem X, feature requirements are changed into child subsystems of X
+        //  - for each feature, any dependant features (<feature>/<feature>) will become non-mandatory (why?)
+        //    child subsystem of the same region's subsystem as original feature
+        //  - for each feature, any conditional (<feature>/<conditional>) will become mandatory (why?)
+        //    child subsystem of the original feature's subsystem
         root.build(allFeatures);
 
-        // Add system resources
+        // #3. Add system resources
+        //  - from all unmanaged bundles we'll gather Provide-Capability headers' clauses in "osgi.service" namespace
+        //    and Export-Service headers
+        //  - these capabilities will be added to "dummy" Resource added as o.a.k.features.internal.region.Subsystem.installable
         BundleRevision sysBundleRev = null;
         boolean hasEeCap = false;
         for (Map.Entry<String, Set<BundleRevision>> entry : system.entrySet()) {
@@ -164,17 +192,18 @@ public class SubsystemResolver {
         }
     }
 
-    public Set<String> collectPrerequisites() throws Exception {
+    @Override
+    public Set<String> collectPrerequisites() {
         if (root != null) {
             return root.collectPrerequisites();
         }
         return new HashSet<>();
     }
 
+    @Override
     public Map<Resource, List<Wire>> resolve(
-            Set<String> overrides,
             String featureResolutionRange,
-            String serviceRequirements,
+            FeaturesService.ServiceRequirementsBehavior serviceRequirements,
             final Repository globalRepository,
             String outputFile) throws Exception {
 
@@ -183,8 +212,7 @@ public class SubsystemResolver {
         }
 
         // Download bundles
-        RepositoryManager repos = new RepositoryManager();
-        root.downloadBundles(manager, overrides, featureResolutionRange, serviceRequirements, repos);
+        root.downloadBundles(manager, featureResolutionRange, serviceRequirements, new RepositoryManager(), callback);
 
         // Populate digraph and resolve
         digraph = new StandardRegionDigraph(null, null);
@@ -199,6 +227,7 @@ public class SubsystemResolver {
             }
             json.put("repository", toJson(context.getRepository()));
             try {
+                // this is where the magic happens...
                 wiring = resolver.resolve(context);
                 json.put("success", "true");
                 json.put("wiring", toJson(wiring));
@@ -215,6 +244,7 @@ public class SubsystemResolver {
                 }
             }
         } else {
+            // this is where the magic happens...
             wiring = resolver.resolve(context);
         }
         downloader.await();
@@ -294,6 +324,7 @@ public class SubsystemResolver {
         return obj;
     }
 
+    @Override
     public Map<String, Map<String, BundleInfo>> getBundleInfos() {
         if (bundleInfos == null) {
             bundleInfos = new HashMap<>();
@@ -312,14 +343,17 @@ public class SubsystemResolver {
         }
     }
 
+    @Override
     public Map<String, StreamProvider> getProviders() {
         return manager.getProviders();
     }
 
+    @Override
     public Map<Resource, List<Wire>> getWiring() {
         return wiring;
     }
 
+    @Override
     public RegionDigraph getFlatDigraph() throws BundleException, InvalidSyntaxException {
         if (flatDigraph == null) {
             flatDigraph = new StandardRegionDigraph(null, null);
@@ -355,6 +389,10 @@ public class SubsystemResolver {
         return flatDigraph;
     }
 
+    /**
+     * A mapping from each subsystem to its parent subsystem representing a region or {@link Feature#getScoping() scoped feature}.
+     * @return
+     */
     public Map<String, String> getFlatSubsystemsMap() {
         if (flatSubsystemsMap == null) {
             flatSubsystemsMap = new HashMap<>();
@@ -363,6 +401,7 @@ public class SubsystemResolver {
         return flatSubsystemsMap;
     }
 
+    @Override
     public Map<String, Set<Resource>> getBundlesPerRegions() {
         if (bundlesPerRegions == null) {
             bundlesPerRegions = invert(getBundles());
@@ -371,7 +410,6 @@ public class SubsystemResolver {
     }
 
     /**
-     *
      * @return map of bundles and the region they are deployed in
      */
     public Map<Resource, String> getBundles() {
@@ -386,6 +424,7 @@ public class SubsystemResolver {
         return bundles;
     }
 
+    @Override
     public Map<String, Set<Resource>> getFeaturesPerRegions() {
         if (featuresPerRegions == null) {
             featuresPerRegions = invert(getFeatures());
@@ -393,6 +432,9 @@ public class SubsystemResolver {
         return featuresPerRegions;
     }
 
+    /**
+     * @return map of features and the region they are deployed in
+     */
     public Map<Resource, String> getFeatures() {
         if (features == null) {
             SimpleFilter sf = createFilter(IDENTITY_NAMESPACE, "*",
@@ -403,7 +445,7 @@ public class SubsystemResolver {
     }
 
     /**
-     *
+     * Returns a mapping from resources that match the given filter to the region or scoped-feature subsystem they belong to.
      * @param resourceFilter
      * @return map from resource to region name
      */
@@ -503,6 +545,12 @@ public class SubsystemResolver {
 
     }
 
+    /**
+     * Collects a mapping from every subsystem to its first parent subsystem that is not <em>flat</em>, i.e.,
+     * one that represents a region or a feature with scoping.
+     * @param subsystem
+     * @param toFlatten
+     */
     private void findSubsystemsToFlatten(Subsystem subsystem, Map<String, String> toFlatten) {
         Subsystem nonFlat = subsystem;
         while (isFlat(nonFlat)) {
@@ -516,6 +564,11 @@ public class SubsystemResolver {
         }
     }
 
+    /**
+     * Subsystem is <em>flat</em> if it represents a feature and doesn't declare scoping
+     * @param subsystem
+     * @return
+     */
     private static boolean isFlat(Subsystem subsystem) {
         if (subsystem == null || subsystem.getFeature() == null) {
             return false;
@@ -523,14 +576,27 @@ public class SubsystemResolver {
         return subsystem.getFeature() != null && subsystem.getFeature().getScoping() == null;
     }
 
-    private static Subsystem getOrCreateChild(Subsystem ss, String name) {
-        Subsystem child = ss.getChild(name);
-        return child != null ? child : ss.createSubsystem(name, true);
+    private static Subsystem getOrCreateChild(Subsystem ss, String childName, String newName) {
+        Subsystem child = ss.getChild(childName);
+        return child != null ? child : ss.createSubsystem(newName, true);
     }
 
+    /**
+     * <p>Fills the {@link RegionDigraph} using the information in the populated {@link Subsystem} hierarchy. Each
+     * subsystem, not only those representing regions, is mapped to a distinct region. Subsystems exist for:<ul>
+     *     <li>regions: "region", ..., "region/sub/region"</li>
+     *     <li>features: "region/sub/region#fx-version", ..., "region/sub/region#fz-version"</li>
+     *     <li>conditional features: "region/sub/region#fx#fx-condition-fy-version", ...</li>
+     * </ul></p>
+     * @param digraph
+     * @param subsystem
+     * @throws BundleException
+     * @throws InvalidSyntaxException
+     */
     private void populateDigraph(RegionDigraph digraph, Subsystem subsystem) throws BundleException, InvalidSyntaxException {
         Region region = digraph.createRegion(subsystem.getName());
         if (subsystem.getParent() != null) {
+            // the parent's region has already been created, since we're traversing breadth-first
             Region parent = digraph.getRegion(subsystem.getParent().getName());
             digraph.connect(region, createRegionFilterBuilder(digraph, subsystem.getImportPolicy()).build(), parent);
             digraph.connect(parent, createRegionFilterBuilder(digraph, subsystem.getExportPolicy()).build(), region);
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverCallback.java
similarity index 58%
copy from features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java
copy to features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverCallback.java
index 7a74fae..25340bc 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverCallback.java
@@ -16,27 +16,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.karaf.features.internal.service;
+package org.apache.karaf.features.internal.region;
 
-import org.apache.karaf.features.Repository;
-import org.apache.karaf.features.internal.model.Features;
+import org.apache.karaf.features.BundleInfo;
 
 /**
- * Service that can process (enhance, modify, trim, ...) a set of features read from {@link Repository}.
+ * Additional callback methods that may be invoked by {@link SubsystemResolver}.
  */
-public interface FeaturesProcessor {
+public interface SubsystemResolverCallback {
 
     /**
-     * Checks whether given repository URI is <em>blacklisted</em>
-     * @param uri
-     * @return
+     * Notification about a {@link BundleInfo bundle} being blacklisted.
+     * @param bundleInfo the blacklisted bundle
      */
-    boolean isRepositoryBlacklisted(String uri);
-
-    /**
-     * Processes original {@link Features JAXB model of features}
-     * @param features
-     */
-    void process(Features features);
+    void bundleBlacklisted(BundleInfo bundleInfo);
 
 }
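
For illustration only, a minimal implementation of the callback above could simply log the event; the real
implementation lives in the features service's DeployCallback (see the Deployer changes below), and the class
name here is hypothetical:

    import org.apache.karaf.features.BundleInfo;
    import org.apache.karaf.features.internal.region.SubsystemResolverCallback;

    public class LoggingResolverCallback implements SubsystemResolverCallback {
        @Override
        public void bundleBlacklisted(BundleInfo bundleInfo) {
            // a real callback could collect these, e.g. to report skipped bundles after deployment
            System.out.println("Blacklisted bundle skipped: " + bundleInfo.getLocation());
        }
    }
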
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverResolution.java b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverResolution.java
new file mode 100644
index 0000000..ec12086
--- /dev/null
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverResolution.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.karaf.features.internal.region;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.karaf.features.Feature;
+import org.apache.karaf.features.FeaturesService;
+import org.eclipse.equinox.region.Region;
+import org.osgi.framework.wiring.BundleRevision;
+import org.osgi.resource.Resource;
+import org.osgi.resource.Wire;
+import org.osgi.service.repository.Repository;
+
+/**
+ * Public API of {@link SubsystemResolver}, extracted to document the public methods and separate them from the
+ * internal ones. This interface groups the methods related to the resolution of {@link Subsystem subsystems}.
+ */
+public interface SubsystemResolverResolution {
+
+    /**
+     * <p>Prepares the resolver by configuring the {@link Subsystem} hierarchy.</p>
+     * <p>The input is a mapping from {@link Region region names} to a set of logical requirements.<br/>
+     * The effect is:<ul>
+     *     <li>A tree of {@link Subsystem subsystems} where the root subsystem represents {@link FeaturesService#ROOT_REGION}
+     *      with regions like <code>root/app1</code> represented as child subsystems.</li>
+     *     <li>A subsystem is created for each feature requirement and added as a child of, and a requirement for, the given region's subsystem</li>
+     *     <li>Each subsystem for a feature has optional requirements for conditional features</li>
+     * </ul></p>
+     *
+     * @param allFeatures all currently available features partitioned by name
+     * @param requirements desired mapping from regions to logical requirements
+     * @param system mapping from regions to unmanaged {@link BundleRevision}s
+     * @throws Exception
+     */
+    void prepare(Map<String, List<Feature>> allFeatures,
+                 Map<String, Set<String>> requirements,
+                 Map<String, Set<BundleRevision>> system) throws Exception;
+
+    /**
+     * Before attempting {@link #resolve resolution}, we can collect the features' prerequisites. If there are any,
+     * the caller may decide to deploy another set of requirements <strong>before</strong> the initial ones.
+     * Prerequisites allow, for example, installing the <code>wrap</code> feature before installing a feature with
+     * a bundle that uses the <code>wrap:</code> protocol.
+     * @return set of feature prerequisites (<code>name/version</code>)
+     */
+    Set<String> collectPrerequisites();
+
+    /**
+     * Resolves the prepared {@link Subsystem} hierarchy using the OSGi resolver.
+     * @param featureResolutionRange a bnd macro that changes feature versions into version ranges
+     * @param serviceRequirements how to handle requirements from the {@link org.osgi.namespace.service.ServiceNamespace#SERVICE_NAMESPACE}
+     * namespace
+     * @param globalRepository additional repository used to resolve otherwise unresolved, non-optional requirements
+     * @param outputFile file to store the resolution result in
+     * @return the resolution result as a map from {@link Resource} to its {@link Wire wires}
+     * @throws Exception
+     */
+    public Map<Resource, List<Wire>> resolve(String featureResolutionRange,
+                                             FeaturesService.ServiceRequirementsBehavior serviceRequirements,
+                                             final Repository globalRepository,
+                                             String outputFile) throws Exception;
+
+}
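
A rough usage sketch of the resolution phases above, mirroring how Deployer drives the resolver later in this
commit; the surrounding class, the parameter names and the constructor argument types (taken from the Deployer
usage below) are assumptions, not part of the change:

    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    import org.apache.karaf.features.Feature;
    import org.apache.karaf.features.FeaturesService;
    import org.apache.karaf.features.internal.download.DownloadManager;
    import org.apache.karaf.features.internal.region.SubsystemResolver;
    import org.osgi.framework.wiring.BundleRevision;
    import org.osgi.resource.Resource;
    import org.osgi.resource.Wire;
    import org.osgi.service.resolver.Resolver;

    public class ResolutionSketch {

        // all inputs are assumed to be prepared by the caller, as Deployer.deploy() does below
        public Map<Resource, List<Wire>> resolve(Resolver osgiResolver,
                                                 DownloadManager manager,
                                                 Map<String, List<Feature>> allFeatures,
                                                 Map<String, Set<String>> requirements,
                                                 Map<String, Set<BundleRevision>> systemBundles) throws Exception {
            SubsystemResolver resolver = new SubsystemResolver(osgiResolver, manager);

            // phase 1: build the Subsystem hierarchy for regions, features and conditionals
            resolver.prepare(allFeatures, requirements, systemBundles);

            // phase 2: check prerequisites; a caller would deploy these first (e.g. "wrap"), then retry
            Set<String> prereqs = resolver.collectPrerequisites();

            // phase 3: run the actual resolution
            return resolver.resolve(
                    FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                    FeaturesService.ServiceRequirementsBehavior.Default,
                    null,   // no additional global repository
                    null);  // no resolution output file
        }
    }
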
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverResult.java b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverResult.java
new file mode 100644
index 0000000..216a97c
--- /dev/null
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/region/SubsystemResolverResult.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.karaf.features.internal.region;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.karaf.features.BundleInfo;
+import org.apache.karaf.features.internal.download.StreamProvider;
+import org.eclipse.equinox.region.RegionDigraph;
+import org.osgi.framework.BundleException;
+import org.osgi.framework.InvalidSyntaxException;
+import org.osgi.resource.Resource;
+import org.osgi.resource.Wire;
+import org.osgi.service.resolver.ResolveContext;
+
+/**
+ * Public API of {@link SubsystemResolver}, extracted to document the public methods and separate them from the
+ * internal ones. This interface groups the methods invoked after the resolution of {@link Subsystem subsystems}.
+ */
+public interface SubsystemResolverResult {
+
+    /**
+     * Get a nested map: region name -&gt; bundle location -&gt; actual {@link BundleInfo}
+     * @return
+     */
+    Map<String, Map<String, BundleInfo>> getBundleInfos();
+
+    /**
+     * Get map of all downloaded resources (location -&gt; provider)
+     * @return
+     */
+    Map<String, StreamProvider> getProviders();
+
+    /**
+     * Returns a result of {@link org.osgi.service.resolver.Resolver#resolve(ResolveContext)}
+     * @return
+     */
+    Map<Resource, List<Wire>> getWiring();
+
+    /**
+     * Return directed graph of {@link org.eclipse.equinox.region.Region regions} after resolution.
+     * @return
+     * @throws BundleException
+     * @throws InvalidSyntaxException
+     */
+    RegionDigraph getFlatDigraph() throws BundleException, InvalidSyntaxException;
+
+    /**
+     * Returns a mapping between regions and a set of bundle {@link Resource resources}
+     * @return
+     */
+    Map<String, Set<Resource>> getBundlesPerRegions();
+
+    /**
+     * Returns a mapping between regions and a set of feature {@link Resource resources}
+     * @return
+     */
+    Map<String, Set<Resource>> getFeaturesPerRegions();
+
+}
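
A small sketch of inspecting the result side of the resolver once resolve() has completed (SubsystemResolver
implements both interfaces); the helper class is only illustrative:

    import java.util.Map;
    import java.util.Set;

    import org.apache.karaf.features.internal.region.SubsystemResolverResult;
    import org.osgi.resource.Resource;

    public class ResolutionReport {

        public static void dump(SubsystemResolverResult result) {
            // bundles grouped by the region they were resolved into
            for (Map.Entry<String, Set<Resource>> e : result.getBundlesPerRegions().entrySet()) {
                System.out.println("region " + e.getKey() + ": " + e.getValue().size() + " bundle(s)");
            }
            // region -> bundle location -> BundleInfo, e.g. to check start levels or blacklisting
            result.getBundleInfos().forEach((region, byLocation) ->
                    System.out.println("region " + region + " has " + byLocation.size() + " bundle location(s)"));
        }
    }
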
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/resolver/FeatureResource.java b/features/core/src/main/java/org/apache/karaf/features/internal/resolver/FeatureResource.java
index e0017a8..821931c 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/resolver/FeatureResource.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/resolver/FeatureResource.java
@@ -34,6 +34,12 @@ import static org.apache.karaf.features.internal.resolver.ResourceUtils.TYPE_FEA
 import static org.apache.karaf.features.internal.resolver.ResourceUtils.addIdentityRequirement;
 
 /**
+ * <p>An OSGi {@link Resource} representing a Karaf feature. It has requirements on all its non-dependency
+ * (<code>dependency="false"</code>) bundles.</p>
+ * <p>It also carries the feature's arbitrary capabilities ({@code <feature>/<capability>}) and requirements
+ * ({@code <feature>/<requirement>}).</p>
+ * <p>Dependent features ({@code <feature>/<feature>}) without <code>dependency="true"</code> are also added
+ * as <code>osgi.identity</code> requirements with <code>type=karaf.feature</code>.</p>
  */
 public final class FeatureResource extends ResourceImpl {
 
@@ -46,15 +52,30 @@ public final class FeatureResource extends ResourceImpl {
         this.feature = feature;
     }
 
+    /**
+     * Constructs a {@link Resource} for a conditional of a feature.
+     * @param feature
+     * @param conditional
+     * @param featureRange
+     * @param locToRes
+     * @return
+     * @throws BundleException
+     */
     public static FeatureResource build(Feature feature, Conditional conditional, String featureRange, Map<String, ? extends Resource> locToRes) throws BundleException {
         Feature fcond = conditional.asFeature();
         FeatureResource resource = build(fcond, featureRange, locToRes);
         for (String cond : conditional.getCondition()) {
             if (cond.startsWith("req:")) {
+                // <conditional>/<condition>req:xxx</condition>
+                // conditional feature will require all its bundles and will have all declared, generic
+                // requirements
                 cond = cond.substring("req:".length());
                 List<Requirement> reqs = ResourceBuilder.parseRequirement(resource, cond);
                 resource.addRequirements(reqs);
             } else {
+                // <conditional>/<condition>xxx</condition>
+                // conditional feature will require all its bundles and will require the features that are the
+                // conditions with "condition:=true" directive
                 org.apache.karaf.features.internal.model.Dependency dep = new org.apache.karaf.features.internal.model.Dependency();
                 String[] p = cond.split("/");
                 dep.setName(p[0]);
@@ -67,14 +88,23 @@ public final class FeatureResource extends ResourceImpl {
         org.apache.karaf.features.internal.model.Dependency dep = new org.apache.karaf.features.internal.model.Dependency();
         dep.setName(feature.getName());
         dep.setVersion(feature.getVersion());
+        // conditional feature will also require parent feature - also with "condition:=true" directive
         addDependency(resource, dep, featureRange, true);
         return resource;
     }
 
+    /**
+     * Constructs a {@link Resource} for the given non-conditional feature.
+     * @param feature
+     * @param featureRange
+     * @param locToRes
+     * @return
+     * @throws BundleException
+     */
     public static FeatureResource build(Feature feature, String featureRange, Map<String, ? extends Resource> locToRes) throws BundleException {
         FeatureResource resource = new FeatureResource(feature);
         for (BundleInfo info : feature.getBundles()) {
-            if (!info.isDependency()) {
+            if (!info.isDependency() && !info.isBlacklisted()) {
                 Resource res = locToRes.get(info.getLocation());
                 if (res == null) {
                     throw new IllegalStateException("Resource not found for url " + info.getLocation());
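
The two condition forms handled above (generic requirement vs. feature condition) can be shown with a tiny
standalone sketch; the condition values themselves are hypothetical:

    public class ConditionKinds {
        public static void main(String[] args) {
            String[] conditions = {
                    "req:osgi.ee;filter:=\"(osgi.ee=JavaSE)\"",   // generic requirement condition (hypothetical clause)
                    "http/4.1.0"                                  // feature condition: name[/version]
            };
            for (String cond : conditions) {
                if (cond.startsWith("req:")) {
                    // parsed into generic requirements of the conditional's resource
                    System.out.println("requirement condition: " + cond.substring("req:".length()));
                } else {
                    // turned into an osgi.identity requirement on the feature, with condition:=true
                    String[] p = cond.split("/");
                    System.out.println("feature condition: name=" + p[0]
                            + (p.length > 1 ? ", version=" + p[1] : ""));
                }
            }
        }
    }
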
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResolverUtil.java b/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResolverUtil.java
index 2c23201..ec0744b 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResolverUtil.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResolverUtil.java
@@ -46,6 +46,11 @@ public class ResolverUtil
         return null;
     }
 
+    /**
+     * Returns the name of the owning {@link org.apache.karaf.features.internal.region.Subsystem} for the given resource.
+     * @param resource
+     * @return
+     */
     public static String getOwnerName(Resource resource)
     {
         List<Requirement> reqs = resource.getRequirements(null);
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResourceUtils.java b/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResourceUtils.java
index c1798e9..d1c224c 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResourceUtils.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/resolver/ResourceUtils.java
@@ -69,6 +69,11 @@ public final class ResourceUtils {
         return null;
     }
 
+    /**
+     * If the resource has a <code>type=karaf.feature</code> capability, returns its ID (<code>name[/version]</code>).
+     * @param resource
+     * @return
+     */
     public static String getFeatureId(Resource resource) {
         List<Capability> caps = resource.getCapabilities(null);
         for (Capability cap : caps) {
@@ -144,6 +149,12 @@ public final class ResourceUtils {
         }
     }
 
+    /**
+     * <p>Changes a feature identifier (<code>name[/version]</code>) into a requirement specification.</p>
+     * <p>The resulting requirement clause for a feature has the form: <code>osgi.identity;osgi.identity=feature-name;type=karaf.feature[;version=feature-version];filter:=filter-from-attrs</code></p>
+     * @param feature
+     * @return
+     */
     public static String toFeatureRequirement(String feature) {
         String[] parts = feature.split("/");
         Map<String, Object> attrs = new StringArrayMap<>(parts.length > 1 ? 3 : 2);
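
A quick way to see the resulting clause is to call the utility directly; the feature identifiers below are
placeholders and the exact attribute ordering of the output is determined by the implementation above:

    import org.apache.karaf.features.internal.resolver.ResourceUtils;

    public class FeatureRequirementSketch {
        public static void main(String[] args) {
            // name-only and name/version identifiers
            System.out.println(ResourceUtils.toFeatureRequirement("wrap"));
            System.out.println(ResourceUtils.toFeatureRequirement("http/4.1.0"));
        }
    }
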
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupport.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupport.java
index 5865592..fb31a7a 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupport.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupport.java
@@ -33,6 +33,11 @@ import org.osgi.framework.InvalidSyntaxException;
 import org.osgi.resource.Resource;
 import org.osgi.resource.Wire;
 
+/**
+ * <p>Interface to interact with the OSGi framework.</p>
+ * <p>Bundles are installed into {@link org.eclipse.equinox.region.Region regions}; {@link Feature features}
+ * are used only to obtain their configurations and libraries.</p>
+ */
 public interface BundleInstallSupport {
 
     void print(String message, boolean verbose);
@@ -71,14 +76,18 @@ public interface BundleInstallSupport {
     FrameworkInfo getInfo();
 
     void unregister();
-    
+
+    /**
+     * <p>Low-level state of the system: start levels (initial and current), the system bundle,
+     * the features service bundle and the entire map of bundle IDs to {@link Bundle} instances.</p>
+     * <p>It carries no {@link org.eclipse.equinox.region.Region region} information.</p>
+     */
     class FrameworkInfo {
         public Bundle ourBundle;
         public Bundle systemBundle;
         public int initialBundleStartLevel;
         public int currentStartLevel;
         public Map<Long, Bundle> bundles = new HashMap<>();
-
     }
 
 }
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupportImpl.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupportImpl.java
index 232c57b..24c2fbf 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupportImpl.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/BundleInstallSupportImpl.java
@@ -59,6 +59,10 @@ import org.osgi.resource.Wire;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Interaction with the OSGi framework, where bundles are installed via the {@link RegionDigraph}. After a bundle
+ * is installed, it may be controlled in the standard way via the {@link Bundle} interface.
+ */
 public class BundleInstallSupportImpl implements BundleInstallSupport {
     private static final Logger LOGGER = LoggerFactory.getLogger(BundleInstallSupportImpl.class);
     
@@ -311,7 +315,7 @@ public class BundleInstallSupportImpl implements BundleInstallSupport {
         for (Bundle bundle : systemBundleContext.getBundles()) {
             info.bundles.put(bundle.getBundleId(), bundle);
         }
-        info.systemBundle = info.bundles.get(0);
+        info.systemBundle = info.bundles.get(0L);
         return info;
     }
 }
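
The get(0) to get(0L) change above matters because the map key type is Long: an int literal autoboxes to Integer,
which never equals a Long key, so the old lookup silently returned null. A minimal illustration:

    import java.util.HashMap;
    import java.util.Map;

    public class LongKeyLookup {
        public static void main(String[] args) {
            Map<Long, String> bundles = new HashMap<>();
            bundles.put(0L, "system bundle");
            System.out.println(bundles.get(0));    // null  (Integer key, never matches)
            System.out.println(bundles.get(0L));   // "system bundle"
        }
    }
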
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/Deployer.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/Deployer.java
index 571e923..3c166d3 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/Deployer.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/Deployer.java
@@ -50,6 +50,7 @@ import org.apache.karaf.features.FeaturesService;
 import org.apache.karaf.features.internal.download.DownloadManager;
 import org.apache.karaf.features.internal.download.StreamProvider;
 import org.apache.karaf.features.internal.region.SubsystemResolver;
+import org.apache.karaf.features.internal.region.SubsystemResolverCallback;
 import org.apache.karaf.features.internal.resolver.FeatureResource;
 import org.apache.karaf.features.internal.resolver.ResolverUtil;
 import org.apache.karaf.features.internal.resolver.ResourceUtils;
@@ -83,8 +84,6 @@ import org.slf4j.LoggerFactory;
 
 import static org.apache.karaf.features.FeaturesService.ROOT_REGION;
 import static org.apache.karaf.features.FeaturesService.UPDATEABLE_URIS;
-import static org.apache.karaf.features.FeaturesService.UPDATE_SNAPSHOTS_ALWAYS;
-import static org.apache.karaf.features.FeaturesService.UPDATE_SNAPSHOTS_CRC;
 import static org.apache.karaf.features.internal.resolver.ResolverUtil.getSymbolicName;
 import static org.apache.karaf.features.internal.resolver.ResolverUtil.getVersion;
 import static org.apache.karaf.features.internal.resolver.ResourceUtils.TYPE_SUBSYSTEM;
@@ -111,7 +110,10 @@ import static org.osgi.framework.namespace.IdentityNamespace.TYPE_BUNDLE;
 
 public class Deployer {
 
-    public interface DeployCallback {
+    /**
+     * Interface through which {@link Deployer} interacts with the OSGi framework.
+     */
+    public interface DeployCallback extends SubsystemResolverCallback {
         void print(String message, boolean verbose);
         void saveState(State state);
         void persistResolveRequest(DeploymentRequest request) throws IOException;
@@ -161,11 +163,17 @@ public class Deployer {
     }
 
     /**
-     * <p>Representation of the state of system from the point of view of <em>bundles</em> and <em>features</em></p>
+     * <p>Representation of the state of the system from the point of view of <em>installed bundles</em>
+     * and <em>available features</em>.</p>
      */
     public static class DeploymentState {
-        /** Current {@link State} of system */
+        // part of the deployment state related to features service
+
+        /** Current {@link State} of features service */
         public State state;
+
+        // part of the deployment state related to low level OSGi framework (bundles, no regions)
+
         /** A {@link Bundle} providing {@link FeaturesService} */
         public Bundle serviceBundle;
         /** {@link org.osgi.framework.startlevel.FrameworkStartLevel#getInitialBundleStartLevel()} */
@@ -174,39 +182,97 @@ public class Deployer {
         public int currentStartLevel;
         /** bundle-id -&gt; bundle for all currently installed bundles */
         public Map<Long, Bundle> bundles;
-        /** feature-name/feature-id -&gt; feature for all available features (not only installed) */
-        public Map<String, Feature> features;
-        /** region-name -&gt; ids for bundles installed in region */
+
+        // part of the deployment state related to all available features
+
+        /** feature-name -&gt; list of features (one per version), for all available features (not only installed ones) */
+        private Map<String, List<Feature>> features;
+        /** feature-id -&gt; feature (not only installed) */
+        private Map<String, Feature> featuresById;
+
+        // part of the deployment state related to regions
+
+        /** region-name -&gt; ids for bundles installed in region (see {@link State#managedBundles}) */
         public Map<String, Set<Long>> bundlesPerRegion;
         /** region-name -&gt; connected, filtered, region-name -&gt; filter-namespace -&gt; filters */
         public Map<String, Map<String, Map<String, Set<String>>>> filtersPerRegion;
+
+        /**
+         * Returns all features indexed by their name. For each name there is a collection of {@link Feature features},
+         * one per version.
+         * @return
+         */
+        public Map<String, List<Feature>> featuresByName() {
+            return features;
+        }
+
+        /**
+         * Returns all features indexed by their id.
+         * @return
+         */
+        public Map<String, Feature> featuresById() {
+            return featuresById;
+        }
+
+        /**
+         * Stores the given features indexed both by name (each name mapping to a list of features with different
+         * versions) and by id.
+         * @param featuresList
+         */
+        public void partitionFeatures(Collection<Feature> featuresList) {
+            features = new HashMap<>();
+            featuresById = new HashMap<>();
+            for (Feature feature : featuresList) {
+                features.computeIfAbsent(feature.getName(), name -> new ArrayList<>()).add(feature);
+                featuresById.put(feature.getId(), feature);
+            }
+        }
     }
 
     /**
-     * <p>A request to change current {@link State state} of system</p>
+     * <p>A request to change the current {@link DeploymentState} of the system.</p>
      * <p>{@link #requirements} specify target set of system requirements. If new features are installed,
      * requirements should include currently installed features and new ones. If features are being uninstalled,
      * requirements should include currently installed features minus the ones that are removed.</p>
      */
     public static class DeploymentRequest {
-        public Set<String> overrides;
+        /** A bnd macro that changes feature version into a version range. */
         public String featureResolutionRange;
-        public String serviceRequirements;
+        /** Indication of how to handle requirements from <code>osgi.service</code> namespace */
+        public FeaturesService.ServiceRequirementsBehavior serviceRequirements;
+        /** A bnd macro used to derive the updateable version range for a bundle version (e.g., to determine whether to install or update a bundle) */
         public String bundleUpdateRange;
-        public String updateSnaphots;
+        /** Indication of when to update bundles (or leave them as they are currently installed) */
+        public FeaturesService.SnapshotUpdateBehavior updateSnaphots;
+
+        /**
+         * Additional {@link Repository} used to resolve non-optional requirements that can't be resolved
+         * against the current set of resources
+         */
         public Repository globalRepository;
 
+        /** Target/desired set of requirements per region */
         public Map<String, Set<String>> requirements;
+        /** Target/desired set of features state per region */
         public Map<String, Map<String, FeatureState>> stateChanges;
+        /** Deployment options */
         public EnumSet<FeaturesService.Option> options;
+
+        /** File to store result of deployment */
         public String outputFile;
 
+        /**
+         * Prepares a standard, empty DeploymentRequest, where feature versions are taken literally (no ranges)
+         * and bundle updates use the <em>natural</em> range to decide between install and update (update on a micro
+         * version change, e.g., <code>2.1.0</code> -&gt; <code>2.1.2</code>, but not <code>2.1.2</code> -&gt;
+         * <code>2.2.0</code>).
+         * @return
+         */
         public static DeploymentRequest defaultDeploymentRequest() {
             DeploymentRequest request = new DeploymentRequest();
             request.bundleUpdateRange = FeaturesService.DEFAULT_BUNDLE_UPDATE_RANGE;
             request.featureResolutionRange = FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE;
-            request.serviceRequirements = FeaturesService.SERVICE_REQUIREMENTS_DEFAULT;
-            request.overrides = new HashSet<>();
+            request.serviceRequirements = FeaturesService.ServiceRequirementsBehavior.Default;
             request.requirements = new HashMap<>();
             request.stateChanges = new HashMap<>();
             request.options = EnumSet.noneOf(FeaturesService.Option.class);
@@ -214,15 +280,24 @@ public class Deployer {
         }
     }
 
+    /**
+     * Deployment information for all regions
+     */
     static class Deployment {
         Map<Long, Long> bundleChecksums = new HashMap<>();
         Map<Resource, Bundle> resToBnd = new HashMap<>();
         Map<String, RegionDeployment> regions = new HashMap<>();
     }
 
+    /**
+     * Deployment information for a single region
+     */
     static class RegionDeployment {
+        /** new {@link Resource resources} to install */
         List<Resource> toInstall = new ArrayList<>();
+        /** existing {@link Bundle bundles} to remove */
         List<Bundle> toDelete = new ArrayList<>();
+        /** existing {@link Bundle bundles} to update using new {@link Resource resources} */
         Map<Bundle, Resource> toUpdate = new HashMap<>();
     }
 
@@ -239,6 +314,29 @@ public class Deployer {
     }
 
     /**
+     * Performs a full deployment, retrying as long as missing prerequisites are reported
+     *
+     * @param dstate  deployment state
+     * @param request deployment request
+     * @throws Exception in case of deployment failure.
+     */
+    public void deployFully(DeploymentState dstate, DeploymentRequest request) throws Exception {
+        Set<String> prereqs = new HashSet<>();
+        while (true) {
+            try {
+                deploy(dstate, request);
+                break;
+            } catch (Deployer.PartialDeploymentException e) {
+                if (!prereqs.containsAll(e.getMissing())) {
+                    prereqs.addAll(e.getMissing());
+                } else {
+                    throw new Exception("Deployment aborted due to loop in missing prerequisites: " + e.getMissing());
+                }
+            }
+        }
+    }
+
+    /**
      * Perform a deployment.
      *
      * @param dstate  deployment state
@@ -261,67 +359,30 @@ public class Deployer {
 
         // TODO: add an option to unmanage bundles instead of uninstalling those
 
+        // current managed bundles per region, as known by o.a.k.features.internal.service.FeaturesServiceImpl.state
         Map<String, Set<Long>> managedBundles = copy(dstate.state.managedBundles);
 
-        Map<String, Set<Bundle>> unmanagedBundles = apply(diff(dstate.bundlesPerRegion, dstate.state.managedBundles),
-                map(dstate.bundles));
+        // current not managed (by FeaturesService state) bundles per region, as known by o.a.k.features.internal.service.BundleInstallSupportImpl.digraph
+        // "unmanaged" means "not installed via features service"
+        Map<String, Set<Long>> diff = diff(dstate.bundlesPerRegion, dstate.state.managedBundles);
+        Map<String, Set<Bundle>> unmanagedBundles = apply(diff, map(dstate.bundles));
 
-        // Resolve
+        // Use Subsystem and Felix resolver
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, manager);
-        resolver.prepare(
-                dstate.features.values(),
-                request.requirements,
-                apply(unmanagedBundles, adapt(BundleRevision.class))
-        );
-        Set<String> prereqs = resolver.collectPrerequisites();
-        if (!prereqs.isEmpty()) {
-            for (Iterator<String> iterator = prereqs.iterator(); iterator.hasNext(); ) {
-                String prereq = iterator.next();
-                String[] parts = prereq.split("/");
-                String name = parts[0];
-                String version = parts[1];
-                VersionRange range = getRange(version, request.featureResolutionRange);
-                boolean found = false;
-                for (Set<String> featureSet : dstate.state.installedFeatures.values()) {
-                    for (String feature : featureSet) {
-                        String[] p = feature.split("/");
-                        found = name.equals(p[0]) && range.contains(VersionTable.getVersion(p[1]));
-                        if (found) {
-                            break;
-                        }
-                    }
-                    if (found) {
-                        break;
-                    }
-                }
-                if (found) {
-                    iterator.remove();
-                }
-            }
-        }
-        if (!prereqs.isEmpty()) {
-            if (request.requirements.get(ROOT_REGION).containsAll(prereqs)) {
-                throw new CircularPrerequisiteException(prereqs);
-            }
-            DeploymentRequest newRequest = new DeploymentRequest();
-            newRequest.bundleUpdateRange = request.bundleUpdateRange;
-            newRequest.featureResolutionRange = request.featureResolutionRange;
-            newRequest.serviceRequirements = request.serviceRequirements;
-            newRequest.globalRepository = request.globalRepository;
-            newRequest.options = request.options;
-            newRequest.overrides = request.overrides;
-            newRequest.requirements = copy(dstate.state.requirements);
-            for (String prereq : prereqs) {
-                addToMapSet(newRequest.requirements, ROOT_REGION, prereq);
-            }
-            newRequest.stateChanges = Collections.emptyMap();
-            newRequest.updateSnaphots = request.updateSnaphots;
-            deploy(dstate, newRequest);
-            throw new PartialDeploymentException(prereqs);
-        }
+        resolver.setDeployCallback(callback);
+        Map<String, Set<BundleRevision>> unmanagedBundleRevisions = apply(unmanagedBundles, adapt(BundleRevision.class));
+
+        // preparation - creating OSGi resources with reqs and caps for regions and features
+        resolver.prepare(dstate.featuresByName(), request.requirements, unmanagedBundleRevisions);
 
+        // if some features have prerequisites, we have to deploy them first - this method may throw Exception
+        // to start another cycle of deployment
+        handlePrerequisites(dstate, request, resolver);
+
+        // when there are no more prerequisites, we can resolve Subsystems and Features using Felix resolver
+        // The Subsystem resolver will then have full information about new bundles and bundle updates or removals
+        // per region
         resolver.resolve(
-                request.overrides,
                 request.featureResolutionRange,
                 request.serviceRequirements,
                 request.globalRepository,
@@ -330,6 +391,7 @@ public class Deployer {
         Map<String, StreamProvider> providers = resolver.getProviders();
         Map<String, Set<Resource>> featuresPerRegion = resolver.getFeaturesPerRegions();
         Map<String, Set<String>> installedFeatures = apply(featuresPerRegion, featureId());
+        // changes to current state - added and removed features
         Map<String, Set<String>> newFeatures = diff(installedFeatures, dstate.state.installedFeatures);
         Map<String, Set<String>> delFeatures = diff(dstate.state.installedFeatures, installedFeatures);
 
@@ -365,7 +427,7 @@ public class Deployer {
             }
         }
 
-        // Compute information for each bundle
+        // Compute information for each bundle (region -> location -> BundleInfo)
         Map<String, Map<String, BundleInfo>> bundleInfos = resolver.getBundleInfos();
 
         //
@@ -800,7 +862,7 @@ public class Deployer {
                     addToMapSet(managedBundles, name, bundle.getBundleId());
                     deployment.resToBnd.put(resource, bundle);
                     // save a checksum of installed snapshot bundle
-                    if (UPDATE_SNAPSHOTS_CRC.equals(request.updateSnaphots)
+                    if (FeaturesService.SnapshotUpdateBehavior.Crc == request.updateSnaphots
                             && isUpdateable(resource) && !deployment.bundleChecksums.containsKey(bundle.getBundleId())) {
                         deployment.bundleChecksums.put(bundle.getBundleId(), crc);
                     }
@@ -847,7 +909,7 @@ public class Deployer {
         //
         if (!newFeatures.isEmpty()) {
             Set<String> featureIds = flatten(newFeatures);
-            for (Feature feature : dstate.features.values()) {
+            for (Feature feature : dstate.featuresById.values()) {
                 if (featureIds.contains(feature.getId())) {
                     callback.installConfigs(feature);
                     callback.installLibraries(feature);
@@ -935,7 +997,7 @@ public class Deployer {
         // Call listeners
         for (Map.Entry<String, Set<String>> entry : delFeatures.entrySet()) {
             for (String name : entry.getValue()) {
-                Feature feature = dstate.features.get(name);
+                Feature feature = dstate.featuresById.get(name);
                 if (feature != null) {
                     callback.callListeners(new FeatureEvent(FeatureEvent.EventType.FeatureUninstalled, feature, entry.getKey(), false));
                 }
@@ -943,7 +1005,7 @@ public class Deployer {
         }
         for (Map.Entry<String, Set<String>> entry : newFeatures.entrySet()) {
             for (String name : entry.getValue()) {
-                Feature feature = dstate.features.get(name);
+                Feature feature = dstate.featuresById.get(name);
                 if (feature != null) {
                     callback.callListeners(new FeatureEvent(FeatureEvent.EventType.FeatureInstalled, feature, entry.getKey(), false));
                 }
@@ -954,6 +1016,55 @@ public class Deployer {
         print("Done.", verbose);
     }
 
+    private void handlePrerequisites(DeploymentState dstate, DeploymentRequest request, SubsystemResolver resolver)
+            throws Exception {
+        Set<String> prereqs = resolver.collectPrerequisites();
+        if (!prereqs.isEmpty()) {
+            for (Iterator<String> iterator = prereqs.iterator(); iterator.hasNext(); ) {
+                String prereq = iterator.next();
+                String[] parts = prereq.split("/");
+                String name = parts[0];
+                String version = parts[1];
+                VersionRange range = getRange(version, request.featureResolutionRange);
+                boolean found = false;
+                for (Set<String> featureSet : dstate.state.installedFeatures.values()) {
+                    for (String feature : featureSet) {
+                        String[] p = feature.split("/");
+                        found = name.equals(p[0]) && range.contains(VersionTable.getVersion(p[1]));
+                        if (found) {
+                            break;
+                        }
+                    }
+                    if (found) {
+                        break;
+                    }
+                }
+                if (found) {
+                    iterator.remove();
+                }
+            }
+        }
+        if (!prereqs.isEmpty()) {
+            if (request.requirements.get(ROOT_REGION).containsAll(prereqs)) {
+                throw new CircularPrerequisiteException(prereqs);
+            }
+            DeploymentRequest newRequest = new DeploymentRequest();
+            newRequest.bundleUpdateRange = request.bundleUpdateRange;
+            newRequest.featureResolutionRange = request.featureResolutionRange;
+            newRequest.serviceRequirements = request.serviceRequirements;
+            newRequest.globalRepository = request.globalRepository;
+            newRequest.options = request.options;
+            newRequest.requirements = copy(dstate.state.requirements);
+            for (String prereq : prereqs) {
+                addToMapSet(newRequest.requirements, ROOT_REGION, prereq);
+            }
+            newRequest.stateChanges = Collections.emptyMap();
+            newRequest.updateSnaphots = request.updateSnaphots;
+            deploy(dstate, newRequest);
+            throw new PartialDeploymentException(prereqs);
+        }
+    }
+
     private static VersionRange getRange(String version, String featureResolutionRange) {
         VersionRange range;
         if (version.equals("0.0.0")) {
@@ -1251,10 +1362,10 @@ public class Deployer {
                         // and flag it as to update
                         if (isUpdateable(resource)) {
                             // Always update snapshots
-                            if (UPDATE_SNAPSHOTS_ALWAYS.equalsIgnoreCase(request.updateSnaphots)) {
+                            if (FeaturesService.SnapshotUpdateBehavior.Always == request.updateSnaphots) {
                                 LOGGER.debug("Update snapshot for " + bundle.getLocation());
                                 deployment.toUpdate.put(bundle, resource);
-                            } else if (UPDATE_SNAPSHOTS_CRC.equalsIgnoreCase(request.updateSnaphots)) {
+                            } else if (FeaturesService.SnapshotUpdateBehavior.Crc == request.updateSnaphots) {
                                 // Retrieve current bundle checksum
                                 long oldCrc;
                                 if (dstate.state.bundleChecksums.containsKey(bundleId)) {
@@ -1280,7 +1391,7 @@ public class Deployer {
                                             result.bundleChecksums.put(bundleId, oldCrc);
                                         }
                                     } catch (Throwable t) {
-                                        LOGGER.debug("Error calculating checksum for bundle: %s", bundle, t);
+                                        LOGGER.debug("Error calculating checksum for bundle: {}", bundle, t);
                                     }
                                 }
                                 // Compute new bundle checksum
@@ -1354,7 +1465,7 @@ public class Deployer {
 
     protected boolean isUpdateable(Resource resource) {
         String uri = getUri(resource);
-        return uri.matches(UPDATEABLE_URIS);
+        return uri != null && uri.matches(UPDATEABLE_URIS);
     }
 
     protected List<Bundle> getBundlesToStart(Collection<Bundle> bundles, Bundle serviceBundle) {
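
A sketch of how a caller could build a request with the new enum-based fields and run the prerequisite-aware
deployment; the region, the feature name and the version range are placeholders, and the deployer/state
instances are assumed to exist:

    import java.util.HashSet;

    import org.apache.karaf.features.FeaturesService;
    import org.apache.karaf.features.internal.service.Deployer;

    public class DeploySketch {

        public static void deploy(Deployer deployer, Deployer.DeploymentState dstate) throws Exception {
            Deployer.DeploymentRequest request = Deployer.DeploymentRequest.defaultDeploymentRequest();
            request.serviceRequirements = FeaturesService.ServiceRequirementsBehavior.Default;
            request.updateSnaphots = FeaturesService.SnapshotUpdateBehavior.Crc;
            // requirement syntax follows the feature:name/version-range form described for State below
            request.requirements.computeIfAbsent(FeaturesService.ROOT_REGION, k -> new HashSet<>())
                    .add("feature:my-feature/[1.0,2.0)");

            // deployFully() retries deploy() as long as new prerequisites are reported
            deployer.deployFully(dstate, request);
        }
    }
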
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java
index 7a74fae..e8983d9 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessor.java
@@ -34,6 +34,13 @@ public interface FeaturesProcessor {
     boolean isRepositoryBlacklisted(String uri);
 
     /**
+     * Checks whether given bundle URI is <em>blacklisted</em>
+     * @param uri
+     * @return
+     */
+    boolean isBundleBlacklisted(String uri);
+
+    /**
      * Processes original {@link Features JAXB model of features}
      * @param features
      */
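
A short sketch of how a client of this interface could consult it; obtaining the processor instance and the URIs
is left out (the features service is expected to wire a FeaturesProcessorImpl from its configuration), and the
helper class is illustrative:

    import org.apache.karaf.features.internal.model.Features;
    import org.apache.karaf.features.internal.service.FeaturesProcessor;

    public class ProcessorSketch {

        public static void apply(FeaturesProcessor processor, String repoUri, String bundleUri, Features model) {
            if (processor.isRepositoryBlacklisted(repoUri)) {
                return; // skip the whole repository
            }
            if (!processor.isBundleBlacklisted(bundleUri)) {
                // safe to download / resolve this bundle
            }
            // applies blacklisting and bundle overrides to the JAXB model in place
            processor.process(model);
        }
    }
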
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessorImpl.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessorImpl.java
index c6ed5a6..4e3fa04 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessorImpl.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesProcessorImpl.java
@@ -50,6 +50,8 @@ public class FeaturesProcessorImpl implements FeaturesProcessor {
     public static Logger LOG = LoggerFactory.getLogger(FeaturesProcessorImpl.class);
 
     private static FeaturesProcessingSerializer serializer = new FeaturesProcessingSerializer();
+
+    // empty, but fully functional features processing configuration
     private FeaturesProcessing processing = new FeaturesProcessing();
 
     /**
@@ -66,7 +68,7 @@ public class FeaturesProcessorImpl implements FeaturesProcessor {
                     processing = serializer.read(stream);
                 }
             } catch (FileNotFoundException e) {
-                LOG.warn("Can't find feature processing file (" + featureModificationsURI + ")");
+                LOG.debug("Can't find feature processing file (" + featureModificationsURI + "), skipping");
             } catch (Exception e) {
                 LOG.warn("Can't initialize feature processor: " + e.getMessage());
             }
@@ -119,11 +121,12 @@ public class FeaturesProcessorImpl implements FeaturesProcessor {
     public void process(Features features) {
         // blacklisting features
         for (Feature feature : features.getFeature()) {
-            feature.setBlacklisted(isFeatureBlacklisted(feature));
+            boolean allBlacklisted = features.isBlacklisted();
+            feature.setBlacklisted(allBlacklisted || isFeatureBlacklisted(feature));
             // blacklisting bundles
-            processBundles(feature.getBundle());
+            processBundles(feature.getBundle(), allBlacklisted);
             for (Conditional c : feature.getConditional()) {
-                processBundles(c.getBundle());
+                processBundles(c.getBundle(), allBlacklisted);
             }
         }
 
@@ -132,9 +135,9 @@ public class FeaturesProcessorImpl implements FeaturesProcessor {
         // TODO: overriding features
     }
 
-    private void processBundles(List<Bundle> bundles) {
+    private void processBundles(List<Bundle> bundles, boolean allBlacklisted) {
         for (Bundle bundle : bundles) {
-            boolean bundleBlacklisted = isBundleBlacklisted(bundle.getLocation());
+            boolean bundleBlacklisted = allBlacklisted || isBundleBlacklisted(bundle.getLocation());
             if (bundleBlacklisted) {
                 // blacklisting has higher priority
                 bundle.setBlacklisted(true);
@@ -151,18 +154,23 @@ public class FeaturesProcessorImpl implements FeaturesProcessor {
      * @param bundle
      */
     private void staticOverrideBundle(Bundle bundle) {
+        bundle.setOverriden(BundleInfo.BundleOverrideMode.NONE);
+
         for (BundleReplacements.OverrideBundle override : this.getInstructions().getBundleReplacements().getOverrideBundles()) {
             String originalLocation = bundle.getLocation();
             if (override.getOriginalUriPattern().matches(originalLocation)) {
                 LOG.debug("Overriding bundle location \"" + originalLocation + "\" with \"" + override.getReplacement() + "\"");
                 bundle.setOriginalLocation(originalLocation);
-                bundle.setOverriden(true);
+                if (override.getMode() == BundleReplacements.BundleOverrideMode.MAVEN) {
+                    bundle.setOverriden(BundleInfo.BundleOverrideMode.MAVEN);
+                } else {
+                    bundle.setOverriden(BundleInfo.BundleOverrideMode.OSGI);
+                }
                 bundle.setLocation(override.getReplacement());
-                // last rule wins - no break!!!
+                // TOCHECK: last rule wins - no break!!!
                 //break;
             }
         }
-
     }
 
     @Override
@@ -190,7 +198,8 @@ public class FeaturesProcessorImpl implements FeaturesProcessor {
      * @param location
      * @return
      */
-    private boolean isBundleBlacklisted(String location) {
+    @Override
+    public boolean isBundleBlacklisted(String location) {
         return getInstructions().getBlacklist().isBundleBlacklisted(location);
     }
 
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceConfig.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceConfig.java
index 1f1fdfd..7fb0f26 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceConfig.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceConfig.java
@@ -53,18 +53,50 @@ public class FeaturesServiceConfig {
      */
     public final String serviceRequirements;
 
+    /**
+     * Location of <code>etc/blacklisted.properties</code>
+     */
+    @Deprecated
     public final String blacklisted;
+
+    /**
+     * Location of <code>etc/org.apache.karaf.features.xml</code>
+     */
     public final String featureModifications;
+
+    /**
+     * Location of <code>etc/overrides.properties</code>
+     */
+    @Deprecated
     public final String overrides;
 
     public FeaturesServiceConfig() {
         this(null, null, null);
     }
 
+    public FeaturesServiceConfig(String featureModifications) {
+        this(null, FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE, FeaturesService.DEFAULT_BUNDLE_UPDATE_RANGE, null, 1, 0, 0, null, featureModifications, null);
+    }
+
+    @Deprecated
     public FeaturesServiceConfig(String overrides, String blacklisted, String featureModifications) {
         this(overrides, FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE, FeaturesService.DEFAULT_BUNDLE_UPDATE_RANGE, null, 1, 0, 0, blacklisted, featureModifications, null);
     }
 
+    public FeaturesServiceConfig(String featureResolutionRange, String bundleUpdateRange, String updateSnapshots, int downloadThreads, long scheduleDelay, int scheduleMaxRun, String featureModifications, String serviceRequirements) {
+        this.overrides = null;
+        this.featureResolutionRange = featureResolutionRange;
+        this.bundleUpdateRange = bundleUpdateRange;
+        this.updateSnapshots = updateSnapshots;
+        this.downloadThreads = downloadThreads;
+        this.scheduleDelay = scheduleDelay;
+        this.scheduleMaxRun = scheduleMaxRun;
+        this.blacklisted = null;
+        this.featureModifications = featureModifications;
+        this.serviceRequirements = serviceRequirements;
+    }
+
+    @Deprecated
     public FeaturesServiceConfig(String overrides, String featureResolutionRange, String bundleUpdateRange, String updateSnapshots, int downloadThreads, long scheduleDelay, int scheduleMaxRun, String blacklisted, String featureModifications, String serviceRequirements) {
         this.overrides = overrides;
         this.featureResolutionRange = featureResolutionRange;
@@ -77,4 +109,5 @@ public class FeaturesServiceConfig {
         this.featureModifications = featureModifications;
         this.serviceRequirements = serviceRequirements;
     }
+
 }
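
With the new constructors, a configuration that only points at the features processing file can be created
directly; the path below is illustrative:

    import org.apache.karaf.features.internal.service.FeaturesServiceConfig;

    public class ConfigSketch {
        public static void main(String[] args) {
            // single-argument constructor added in this commit; blacklisted/overrides stay null (deprecated)
            FeaturesServiceConfig cfg = new FeaturesServiceConfig(
                    System.getProperty("karaf.etc", "etc") + "/org.apache.karaf.features.xml");
            System.out.println(cfg.featureModifications);
        }
    }
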
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceImpl.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceImpl.java
index 5a6d788..0302478 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceImpl.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/FeaturesServiceImpl.java
@@ -51,6 +51,7 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import org.apache.felix.utils.version.VersionCleaner;
+import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.DeploymentEvent;
 import org.apache.karaf.features.DeploymentListener;
 import org.apache.karaf.features.Feature;
@@ -125,6 +126,9 @@ public class FeaturesServiceImpl implements FeaturesService, Deployer.DeployCall
 
     // Synchronized on lock
     private final Object lock = new Object();
+    /**
+     * {@link State} persisted to data directory of features.core bundle.
+     */
     private final State state = new State();
 
     private final ExecutorService executor;
@@ -223,7 +227,7 @@ public class FeaturesServiceImpl implements FeaturesService, Deployer.DeployCall
                 // Make sure we don't store bundle checksums if
                 // it has been disabled through configadmin
                 // so that we don't keep out-of-date checksums.
-                if (!UPDATE_SNAPSHOTS_CRC.equalsIgnoreCase(cfg.updateSnapshots)) {
+                if (!SnapshotUpdateBehavior.Crc.getValue().equalsIgnoreCase(cfg.updateSnapshots)) {
                     state.bundleChecksums.clear();
                 }
                 storage.save(state);
@@ -965,7 +969,7 @@ public class FeaturesServiceImpl implements FeaturesService, Deployer.DeployCall
         dstate.currentStartLevel = info.currentStartLevel;
         dstate.bundles = info.bundles;
         // Features
-        dstate.features = featuresById;
+        dstate.partitionFeatures(featuresById.values());
         RegionDigraph regionDigraph = installSupport.getDiGraphCopy();
         dstate.bundlesPerRegion = DigraphHelper.getBundlesPerRegion(regionDigraph);
         dstate.filtersPerRegion = DigraphHelper.getPolicies(regionDigraph);
@@ -973,13 +977,12 @@ public class FeaturesServiceImpl implements FeaturesService, Deployer.DeployCall
     }
 
     private Deployer.DeploymentRequest getDeploymentRequest(Map<String, Set<String>> requirements, Map<String, Map<String, FeatureState>> stateChanges, EnumSet<Option> options, String outputFile) {
-        Deployer.DeploymentRequest request = new Deployer.DeploymentRequest();
+        Deployer.DeploymentRequest request = Deployer.DeploymentRequest.defaultDeploymentRequest();
         request.bundleUpdateRange = cfg.bundleUpdateRange;
         request.featureResolutionRange = cfg.featureResolutionRange;
-        request.serviceRequirements = cfg.serviceRequirements;
-        request.updateSnaphots = cfg.updateSnapshots;
+        request.serviceRequirements = ServiceRequirementsBehavior.fromString(cfg.serviceRequirements);
+        request.updateSnaphots = SnapshotUpdateBehavior.fromString(cfg.updateSnapshots);
         request.globalRepository = globalRepository;
-        request.overrides = Overrides.loadOverrides(cfg.overrides);
         request.requirements = requirements;
         request.stateChanges = stateChanges;
         request.options = options;
@@ -1120,6 +1123,11 @@ public class FeaturesServiceImpl implements FeaturesService, Deployer.DeployCall
         installSupport.installLibraries(feature);
     }
 
+    @Override
+    public void bundleBlacklisted(BundleInfo bundleInfo) {
+
+    }
+
     private String join(Collection<FeatureReq> reqs) {
         return reqs.stream().map(FeatureReq::toString).collect(Collectors.joining(","));
     }
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/Overrides.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/Overrides.java
index 7d6f3e2..3d2d9b7 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/Overrides.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/Overrides.java
@@ -70,7 +70,10 @@ public final class Overrides {
      * @param resources the list of resources to resolve
      * @param overrides list of bundle overrides
      * @param <T> the resource type.
+     *
+     * @deprecated Use {@link #override(Map, Map)}
      */
+    @Deprecated
     public static <T extends Resource> void override(Map<String, T> resources, Collection<String> overrides) {
         // Do override replacement
         for (Clause override : Parser.parseClauses(overrides.toArray(new String[overrides.size()]))) {
@@ -90,6 +93,35 @@ public final class Overrides {
     }
 
     /**
+     * <p>The input map of resources is checked: if there is a matching resource in <code>overridenFrom</code>
+     * and the symbolic names do <strong>not</strong> match, the resource for the original URI is restored.
+     * Effectively this method reverts the {@link org.apache.karaf.features.internal.model.processing.BundleReplacements.BundleOverrideMode#MAVEN maven}
+     * override mode when symbolic names don't match.</p>
+     *
+     * <p>About versions: with the previous <code>${karaf.etc}/overrides.properties</code> mechanism, the symbolic
+     * names had to match <strong>and</strong> the versions had to be compatible, either via the implicit rules or
+     * via the <code>range</code> clause. With the new mechanism we know whether to use an OSGi or a Maven override,
+     * but we lose the OSGi version range matching; we assume that the version rules were applied at
+     * features JAXB model processing time.</p>
+     *
+     * @param resources
+     * @param overridenFrom
+     * @param <T>
+     */
+    public static <T extends Resource> void override(Map<String, T> resources, Map<String, T> overridenFrom) {
+        for (Map.Entry<String, T> original : overridenFrom.entrySet()) {
+            T replacement = resources.get(original.getKey());
+            if (replacement == null) {
+                continue;
+            }
+            if (!shouldOverride(original.getValue(), replacement, "[0,*)")) {
+                // bring back original version
+                resources.put(original.getKey(), original.getValue());
+            }
+        }
+    }
+
+    /**
      * @param resource resource to be overriden
      * @param explicitRange range set on the override clause
      * @return if the resource should be overriden by the given override
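
A sketch of the new override(Map, Map) contract; both maps are assumed to share the same location keys, with
overridenFrom remembering the resource that was in place before a MAVEN-mode replacement:

    import java.util.Map;

    import org.apache.karaf.features.internal.service.Overrides;
    import org.osgi.resource.Resource;

    public class OverrideRevertSketch {

        public static void revertNonMatching(Map<String, Resource> resources,
                                             Map<String, Resource> overridenFrom) {
            // resources: location -> resource after replacements were applied
            // overridenFrom: location -> original resource before replacement
            Overrides.override(resources, overridenFrom);
            // afterwards, any location whose replacement does not match the original by symbolic
            // name points at the original resource again
        }
    }
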
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/RepositoryImpl.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/RepositoryImpl.java
index 3d76c23..c734e8a 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/RepositoryImpl.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/RepositoryImpl.java
@@ -89,6 +89,7 @@ public class RepositoryImpl implements Repository {
 
     public void setBlacklisted(boolean blacklisted) {
         this.blacklisted = blacklisted;
+        features.setBlacklisted(blacklisted);
     }
 
     private void load(boolean validate) {
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/service/State.java b/features/core/src/main/java/org/apache/karaf/features/internal/service/State.java
index b89947b..e67d4a7 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/service/State.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/service/State.java
@@ -25,32 +25,33 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.karaf.features.internal.util.MapUtils;
 
 /**
- * <p>Representation of the state of system from the point of view of <em>requirements</em>.
+ * <p>Representation of the state of the features service from the point of view of <em>logical requirements</em>
+ * which are translated into bundles and features installed in {@link org.eclipse.equinox.region.Region regions}.
  * It's a collection of:<ul>
  *     <li>used repositories</li>
- *     <li>region -&gt; requirements</li>
- *     <li>region -&gt; installed features</li>
+ *     <li>region -&gt; requirements (logical feature requirements)</li>
+ *     <li>region -&gt; installed features (actual features installed - including conditionals and dependent features)</li>
  *     <li>region -&gt; installed features -&gt; state of feature installation</li>
- *     <li>region -&gt; bundle ids</li>
+ *     <li>region -&gt; bundle ids (for bundles installed via features service, a.k.a. <em>managed bundles</em>)</li>
  *     <li>bundle id -&gt; checksum</li>
  * </ul></p>
  * <p>State is replaced (swapped) after uninstalling/updating/installing all the bundles as requested, but
- * before resolving/refreshing them.</p>
+ * before resolving/refreshing them. Before the new State is set, the work is done on an instance of Deployer.DeploymentState.</p>
  */
 public class State {
 
     public final AtomicBoolean bootDone = new AtomicBoolean();
     public final Set<String> repositories = new TreeSet<>();
     
-    /** Map from region name to Set of feature requirements (name/version range) */
+    /** Map from region name to Set of feature requirements (<code>feature:name/version-range</code>) */
     public final Map<String, Set<String>> requirements = new HashMap<>();
-    /** Map from region name to Set of feature id (name/version) */
+    /** Map from region name to Set of feature id (<code>name/version</code>) */
     public final Map<String, Set<String>> installedFeatures = new HashMap<>();
     
-    /** State of features by region and feature id (name/version) */
+    /** State of features by region and feature id (<code>name/version</code>) */
     public final Map<String, Map<String, String>> stateFeatures = new HashMap<>();
 
-    /** Map from region name to Set of installed bundle ids */
+    /** Map from region name to Set of ids of bundles installed via some features or requirements */
     public final Map<String, Set<Long>> managedBundles = new HashMap<>();
     /** Map from bundle id to bundle's java.util.zip.CRC32 */
     public final Map<Long, Long> bundleChecksums = new HashMap<>();
diff --git a/features/core/src/main/java/org/apache/karaf/features/internal/util/MapUtils.java b/features/core/src/main/java/org/apache/karaf/features/internal/util/MapUtils.java
index 92128f0..f3dd158 100644
--- a/features/core/src/main/java/org/apache/karaf/features/internal/util/MapUtils.java
+++ b/features/core/src/main/java/org/apache/karaf/features/internal/util/MapUtils.java
@@ -42,6 +42,18 @@ public final class MapUtils {
         return inverted;
     }
 
+    /**
+     * Changes a mapping from <code>S</code> -&gt; <code>Set&lt;T&gt;</code> into a mapping
+     * <code>S</code> -&gt; <code>Set&lt;U&gt;</code> using a {@link Function} that converts <code>T</code> to
+     * <code>U</code>.
+     *
+     * @param mapset the source map of sets
+     * @param function the function applied to every value of every set
+     * @param <S> the key type, mapping to a set of values in both the input and the result map
+     * @param <T> the type of the input set values
+     * @param <U> the type of the result set values
+     * @return a new map with the same keys and the converted value sets
+     */
     public static <S, T, U> Map<S, Set<U>> apply(Map<S, Set<T>> mapset, Function<T, U> function) {
         Map<S, Set<U>> result = new HashMap<>(mapset.size());
         for (Map.Entry<S, Set<T>> entry : mapset.entrySet()) {
@@ -94,6 +106,15 @@ public final class MapUtils {
         return set;
     }
 
+    /**
+     * Produces a new map in which each set value of <code>from</code> has every element present under the same
+     * key in <code>to</code> removed. Entries whose set ends up empty are dropped from the result.
+     * @param from the map to subtract from
+     * @param to the map of values to remove
+     * @param <S> the key type
+     * @param <T> the set value type
+     * @return a new map containing the difference
+     */
     public static <S, T> Map<S, Set<T>> diff(Map<S, Set<T>> from, Map<S, Set<T>> to) {
         Map<S, Set<T>> diff = copyMapSet(from);
         remove(diff, to);
@@ -118,6 +139,14 @@ public final class MapUtils {
         }
     }
 
+    /**
+     * Removes, from each set value of <code>from</code>, all elements present under the same key in
+     * <code>toRemove</code>. Sets in <code>from</code> may shrink or, if they become empty, be removed entirely.
+     * @param from the map to remove values from (modified in place)
+     * @param toRemove the map of values to remove
+     * @param <S> the key type
+     * @param <T> the set value type
+     */
     public static <S, T> void remove(Map<S, Set<T>> from, Map<S, Set<T>> toRemove) {
         for (Map.Entry<S, Set<T>> entry : toRemove.entrySet()) {
             Set<T> s = from.get(entry.getKey());
diff --git a/features/core/src/test/java/org/apache/karaf/features/internal/region/FeaturesDependenciesTest.java b/features/core/src/test/java/org/apache/karaf/features/internal/region/FeaturesDependenciesTest.java
index c482534..036e27d 100644
--- a/features/core/src/test/java/org/apache/karaf/features/internal/region/FeaturesDependenciesTest.java
+++ b/features/core/src/test/java/org/apache/karaf/features/internal/region/FeaturesDependenciesTest.java
@@ -26,8 +26,10 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.felix.resolver.ResolverImpl;
+import org.apache.karaf.features.Feature;
 import org.apache.karaf.features.FeaturesService;
 import org.apache.karaf.features.internal.resolver.Slf4jResolverLog;
+import org.apache.karaf.features.internal.service.Deployer;
 import org.apache.karaf.features.internal.service.RepositoryImpl;
 import org.apache.karaf.features.internal.support.TestDownloadManager;
 import org.junit.Test;
@@ -141,11 +143,10 @@ public class FeaturesDependenciesTest {
         }
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data8"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                 requirements,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -203,4 +204,10 @@ public class FeaturesDependenciesTest {
                 + cap.getAttributes().get(IdentityNamespace.CAPABILITY_VERSION_ATTRIBUTE);
     }
 
+    private Map<String, List<Feature>> partitionByName(Feature[] features) {
+        Deployer.DeploymentState ds = new Deployer.DeploymentState();
+        ds.partitionFeatures(Arrays.asList(features));
+        return ds.featuresByName();
+    }
+
 }
diff --git a/features/core/src/test/java/org/apache/karaf/features/internal/region/SubsystemTest.java b/features/core/src/test/java/org/apache/karaf/features/internal/region/SubsystemTest.java
index f6b6563..8984aba 100644
--- a/features/core/src/test/java/org/apache/karaf/features/internal/region/SubsystemTest.java
+++ b/features/core/src/test/java/org/apache/karaf/features/internal/region/SubsystemTest.java
@@ -26,9 +26,12 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.felix.resolver.ResolverImpl;
+import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.Feature;
 import org.apache.karaf.features.FeaturesService;
+import org.apache.karaf.features.internal.model.Bundle;
 import org.apache.karaf.features.internal.resolver.Slf4jResolverLog;
+import org.apache.karaf.features.internal.service.Deployer;
 import org.apache.karaf.features.internal.service.RepositoryImpl;
 import org.apache.karaf.features.internal.support.TestDownloadManager;
 import org.junit.Test;
@@ -62,11 +65,35 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps1", "b/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data1"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                          features,
                          Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                         FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+                         null, null, null);
+
+        verify(resolver, expected);
+    }
+
+    @Test
+    public void test1a() throws Exception {
+        RepositoryImpl repo = new RepositoryImpl(getClass().getResource("data1/features.xml").toURI());
+
+        Map<String, Set<String>> features = new HashMap<>();
+        addToMapSet(features, "root", "f1");
+        addToMapSet(features, "root/apps1", "f2");
+        addToMapSet(features, "root/apps1/regionx", "bundle:d");
+
+        Map<String, Set<String>> expected = new HashMap<>();
+        addToMapSet(expected, "root", "a/1.0.0");
+        addToMapSet(expected, "root", "c/1.0.0");
+        addToMapSet(expected, "root/apps1", "b/1.0.0");
+        addToMapSet(expected, "root/apps1/regionx", "d/1.0.0");
+
+        SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data1"));
+        resolver.prepare(partitionByName(repo.getFeatures()),
+                         features,
+                         Collections.emptyMap());
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                          null, null, null);
 
         verify(resolver, expected);
@@ -93,20 +120,24 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps2#f1", "a/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data2"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                          features,
                          Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                         FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                          null, null, null);
 
         verify(resolver, expected);
     }
 
     @Test
-    public void testOverrides() throws Exception {
+    public void testOverridesCompatibilityModeSymbolicNameMatches() throws Exception {
         RepositoryImpl repo = new RepositoryImpl(getClass().getResource("data3/features.xml").toURI());
 
+        // this is normally done by the features processor
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setOverriden(BundleInfo.BundleOverrideMode.OSGI);
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setOriginalLocation("a");
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setLocation("b");
+
         Map<String, Set<String>> features = new HashMap<>();
         addToMapSet(features, "root/apps1", "f1");
 
@@ -114,11 +145,62 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps1", "a/1.0.1");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data3"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
+                         features,
+                         Collections.emptyMap());
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+                         null, null, null);
+
+        verify(resolver, expected);
+    }
+
+    @Test
+    public void testOverridesCompatibilityModeSymbolicDoesNotMatch() throws Exception {
+        RepositoryImpl repo = new RepositoryImpl(getClass().getResource("data3/features.xml").toURI());
+
+        // this is normally done by the features processor
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setOverriden(BundleInfo.BundleOverrideMode.OSGI);
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setOriginalLocation("a");
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setLocation("c");
+
+        Map<String, Set<String>> features = new HashMap<>();
+        addToMapSet(features, "root/apps1", "f1");
+
+        // we expect the override not to be used - the symbolic names of a and c don't match
+        Map<String, Set<String>> expected = new HashMap<>();
+        addToMapSet(expected, "root/apps1", "a/1.0.0");
+
+        SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data3"));
+        resolver.prepare(partitionByName(repo.getFeatures()),
+                         features,
+                         Collections.emptyMap());
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+                         null, null, null);
+
+        verify(resolver, expected);
+    }
+
+    @Test
+    public void testOverridesMavenMode() throws Exception {
+        RepositoryImpl repo = new RepositoryImpl(getClass().getResource("data3/features.xml").toURI());
+
+        // this is normally done by the features processor
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setOverriden(BundleInfo.BundleOverrideMode.MAVEN);
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setOriginalLocation("a");
+        ((Bundle)repo.getFeatures()[0].getBundles().get(0)).setLocation("c");
+
+        Map<String, Set<String>> features = new HashMap<>();
+        addToMapSet(features, "root/apps1", "f1");
+
+        // we expect the override to be used - the symbolic names of a and c don't match, but MAVEN mode doesn't care
+        Map<String, Set<String>> expected = new HashMap<>();
+        addToMapSet(expected, "root/apps1", "not-a/1.0.1");
+
+        SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data3"));
+        resolver.prepare(partitionByName(repo.getFeatures()),
                          features,
                          Collections.emptyMap());
-        resolver.resolve(Collections.singleton("b"),
-                         FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                          null, null, null);
 
         verify(resolver, expected);
@@ -134,11 +216,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps1", "a/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data4"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                          features,
                          Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                         FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                          null, null, null);
 
         verify(resolver, expected);
@@ -156,11 +237,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps1", "b/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data4"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                          features,
                          Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                         FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                          null, null, null);
 
         verify(resolver, expected);
@@ -178,11 +258,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps1", "c/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data1"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                 features,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -199,11 +278,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root", "b/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data5"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                 features,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -221,11 +299,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root", "c/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data5"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                 features,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -243,11 +320,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root", "c/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data6"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                 features,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -267,11 +343,10 @@ public class SubsystemTest {
         addToMapSet(expected, "root/apps1", "b/1.0.0");
 
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data7"));
-        resolver.prepare(Arrays.asList(repo.getFeatures()),
+        resolver.prepare(partitionByName(repo.getFeatures()),
                 features,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -294,12 +369,37 @@ public class SubsystemTest {
         addToMapSet(expected, "root", "pax-web-tomcat/6.0.4");
         addToMapSet(expected, "root", "pax-web-api/6.0.4");
 
+        Deployer.DeploymentState ds = new Deployer.DeploymentState();
+        ds.partitionFeatures(allFeatures);
         SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data9"));
-        resolver.prepare(allFeatures,
+        resolver.prepare(ds.featuresByName(),
                 features,
                 Collections.emptyMap());
-        resolver.resolve(Collections.emptySet(),
-                FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
+                null, null, null);
+
+        verify(resolver, expected);
+    }
+
+    @Test
+    public void testBlacklistedBundles() throws Exception {
+        RepositoryImpl repo = new RepositoryImpl(getClass().getResource("data10/features.xml").toURI());
+
+        // this is normally done by the features processor
+        ((Bundle)repo.getFeatures()[0].getBundles().get(1)).setBlacklisted(true);
+
+        Map<String, Set<String>> features = new HashMap<>();
+        addToMapSet(features, "root", "f1");
+
+        Map<String, Set<String>> expected = new HashMap<>();
+        // we expect only bundle "a", as "b" is blacklisted
+        addToMapSet(expected, "root", "a/1.0.0");
+
+        SubsystemResolver resolver = new SubsystemResolver(this.resolver, new TestDownloadManager(getClass(), "data10"));
+        resolver.prepare(partitionByName(repo.getFeatures()),
+                features,
+                Collections.emptyMap());
+        resolver.resolve(FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE,
                 null, null, null);
 
         verify(resolver, expected);
@@ -357,4 +457,10 @@ public class SubsystemTest {
                 + cap.getAttributes().get(IdentityNamespace.CAPABILITY_VERSION_ATTRIBUTE);
     }
 
+    private Map<String, List<Feature>> partitionByName(Feature[] features) {
+        Deployer.DeploymentState ds = new Deployer.DeploymentState();
+        ds.partitionFeatures(Arrays.asList(features));
+        return ds.featuresByName();
+    }
+
 }
diff --git a/features/core/src/test/java/org/apache/karaf/features/internal/service/DeployerTest.java b/features/core/src/test/java/org/apache/karaf/features/internal/service/DeployerTest.java
index a08d7c5..a124e8d 100644
--- a/features/core/src/test/java/org/apache/karaf/features/internal/service/DeployerTest.java
+++ b/features/core/src/test/java/org/apache/karaf/features/internal/service/DeployerTest.java
@@ -19,6 +19,7 @@ package org.apache.karaf.features.internal.service;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -32,6 +33,7 @@ import java.util.jar.Manifest;
 
 import org.apache.felix.resolver.ResolverImpl;
 import org.apache.felix.utils.version.VersionRange;
+import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.DeploymentEvent;
 import org.apache.karaf.features.Feature;
 import org.apache.karaf.features.FeatureEvent;
@@ -113,9 +115,7 @@ public class DeployerTest {
         dstate.state = new State();
         dstate.bundles = new HashMap<>();
         dstate.bundlesPerRegion = new HashMap<>();
-        dstate.features = new HashMap<>();
-        dstate.features.put(f100.getId(), f100);
-        dstate.features.put(f101.getId(), f101);
+        dstate.partitionFeatures(Arrays.asList(f100, f101));
         dstate.filtersPerRegion = new HashMap<>();
         dstate.filtersPerRegion.put(ROOT_REGION, new HashMap<>());
 
@@ -124,9 +124,8 @@ public class DeployerTest {
         request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;
         request.globalRepository = null;
         request.options = EnumSet.noneOf(Option.class);
-        request.overrides = Collections.emptySet();
         request.stateChanges = Collections.emptyMap();
-        request.updateSnaphots = UPDATE_SNAPSHOTS_NONE;
+        request.updateSnaphots = SnapshotUpdateBehavior.None;
         request.requirements = new HashMap<>();
         addToMapSet(request.requirements, ROOT_REGION, f100.getName() + "/" + new VersionRange(f100.getVersion(), true));
 
@@ -215,9 +214,7 @@ public class DeployerTest {
         dstate.bundles.put(1L, bundleA);
         dstate.bundlesPerRegion = new HashMap<>();
         addToMapSet(dstate.bundlesPerRegion, ROOT_REGION, 1L);
-        dstate.features = new HashMap<>();
-        dstate.features.put(f100.getId(), f100);
-        dstate.features.put(f101.getId(), f101);
+        dstate.partitionFeatures(Arrays.asList(f100, f101));
         dstate.filtersPerRegion = new HashMap<>();
         dstate.filtersPerRegion.put(ROOT_REGION, new HashMap<>());
 
@@ -226,9 +223,8 @@ public class DeployerTest {
         request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;
         request.globalRepository = null;
         request.options = EnumSet.noneOf(Option.class);
-        request.overrides = Collections.emptySet();
         request.stateChanges = Collections.emptyMap();
-        request.updateSnaphots = UPDATE_SNAPSHOTS_NONE;
+        request.updateSnaphots = SnapshotUpdateBehavior.None;
         request.requirements = new HashMap<>();
         addToMapSet(request.requirements, ROOT_REGION, f101.getName() + "/" + new VersionRange(f101.getVersion(), true));
 
@@ -286,7 +282,7 @@ public class DeployerTest {
         dstate.bundles.put(serviceBundle.getBundleId(), serviceBundle);
         dstate.bundlesPerRegion = new HashMap<>();
         addToMapSet(dstate.bundlesPerRegion, ROOT_REGION, serviceBundle.getBundleId());
-        dstate.features = Collections.singletonMap(f1.getId(), f1);
+        dstate.partitionFeatures(Collections.singletonList(f1));
         dstate.filtersPerRegion = new HashMap<>();
         dstate.filtersPerRegion.put(ROOT_REGION, new HashMap<>());
 
@@ -295,9 +291,8 @@ public class DeployerTest {
         request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;
         request.globalRepository = null;
         request.options = EnumSet.noneOf(Option.class);
-        request.overrides = Collections.emptySet();
         request.stateChanges = Collections.emptyMap();
-        request.updateSnaphots = UPDATE_SNAPSHOTS_NONE;
+        request.updateSnaphots = SnapshotUpdateBehavior.None;
         request.requirements = new HashMap<>();
         addToMapSet(request.requirements, ROOT_REGION, f1.getName());
 
@@ -358,9 +353,7 @@ public class DeployerTest {
         dstate.state = new State();
         dstate.bundles = new HashMap<>();
         dstate.bundlesPerRegion = new HashMap<>();
-        dstate.features = new HashMap<>();
-        dstate.features.put(f1.getId(), f1);
-        dstate.features.put(f2.getId(), f2);
+        dstate.partitionFeatures(Arrays.asList(f1, f2));
         dstate.filtersPerRegion = new HashMap<>();
         dstate.filtersPerRegion.put(ROOT_REGION, new HashMap<>());
 
@@ -369,9 +362,8 @@ public class DeployerTest {
         request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;
         request.globalRepository = null;
         request.options = EnumSet.noneOf(Option.class);
-        request.overrides = Collections.emptySet();
         request.stateChanges = Collections.emptyMap();
-        request.updateSnaphots = UPDATE_SNAPSHOTS_NONE;
+        request.updateSnaphots = SnapshotUpdateBehavior.None;
         request.requirements = new HashMap<>();
         addToMapSet(request.requirements, ROOT_REGION, f2.getName());
 
@@ -425,9 +417,7 @@ public class DeployerTest {
         dstate.bundles.put(serviceBundle1.getBundleId(), serviceBundle1);
         dstate.bundlesPerRegion = new HashMap<>();
         addToMapSet(dstate.bundlesPerRegion, ROOT_REGION, serviceBundle1.getBundleId());
-        dstate.features = new HashMap<>();
-        dstate.features.put(f1.getId(), f1);
-        dstate.features.put(f2.getId(), f2);
+        dstate.partitionFeatures(Arrays.asList(f1, f2));
         dstate.filtersPerRegion = new HashMap<>();
         dstate.filtersPerRegion.put(ROOT_REGION, new HashMap<>());
 
@@ -436,9 +426,8 @@ public class DeployerTest {
         request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;
         request.globalRepository = null;
         request.options = EnumSet.noneOf(Option.class);
-        request.overrides = Collections.emptySet();
         request.stateChanges = Collections.emptyMap();
-        request.updateSnaphots = UPDATE_SNAPSHOTS_NONE;
+        request.updateSnaphots = SnapshotUpdateBehavior.None;
         request.requirements = new HashMap<>();
         addToMapSet(request.requirements, ROOT_REGION, f2.getName());
 
@@ -485,10 +474,7 @@ public class DeployerTest {
         dstate.state = new State();
         dstate.bundles = new HashMap<>();
         dstate.bundlesPerRegion = new HashMap<>();
-        dstate.features = new HashMap<>();
-        for (Feature f : repo.getFeatures()) {
-            dstate.features.put(f.getId(), f);
-        }
+        dstate.partitionFeatures(Arrays.asList(repo.getFeatures()));
         dstate.filtersPerRegion = new HashMap<>();
         dstate.filtersPerRegion.put(ROOT_REGION, new HashMap<>());
 
@@ -497,9 +483,8 @@ public class DeployerTest {
         request.featureResolutionRange = DEFAULT_FEATURE_RESOLUTION_RANGE;
         request.globalRepository = null;
         request.options = EnumSet.noneOf(Option.class);
-        request.overrides = Collections.emptySet();
         request.stateChanges = Collections.emptyMap();
-        request.updateSnaphots = UPDATE_SNAPSHOTS_NONE;
+        request.updateSnaphots = SnapshotUpdateBehavior.None;
 
         MyDeployCallback callback = new MyDeployCallback(dstate, bundles);
         Deployer deployer = new Deployer(manager, resolver, callback);
@@ -647,5 +632,10 @@ public class DeployerTest {
         public void installLibraries(Feature feature) throws IOException {
 
         }
+
+        @Override
+        public void bundleBlacklisted(BundleInfo bundleInfo) {
+
+        }
     }
 }
diff --git a/features/core/src/test/java/org/apache/karaf/features/internal/service/FeaturesProcessorTest.java b/features/core/src/test/java/org/apache/karaf/features/internal/service/FeaturesProcessorTest.java
index 63cc100..e73e7d1 100644
--- a/features/core/src/test/java/org/apache/karaf/features/internal/service/FeaturesProcessorTest.java
+++ b/features/core/src/test/java/org/apache/karaf/features/internal/service/FeaturesProcessorTest.java
@@ -24,6 +24,7 @@ import javax.xml.bind.Marshaller;
 
 import org.apache.felix.utils.manifest.Clause;
 import org.apache.felix.utils.version.VersionRange;
+import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.Feature;
 import org.apache.karaf.features.internal.model.Bundle;
 import org.apache.karaf.features.internal.model.processing.BundleReplacements;
@@ -186,18 +187,18 @@ public class FeaturesProcessorTest {
         RepositoryImpl repo = (RepositoryImpl) new RepositoryCacheImpl(processor).create(uri, true);
 
         Feature f1 = repo.getFeatures()[0];
-        assertFalse(f1.getBundles().get(0).isOverriden());
-        assertTrue(f1.getBundles().get(1).isOverriden());
+        assertTrue(f1.getBundles().get(0).isOverriden() == BundleInfo.BundleOverrideMode.NONE);
+        assertTrue(f1.getBundles().get(1).isOverriden() == BundleInfo.BundleOverrideMode.OSGI);
         assertThat(f1.getBundles().get(1).getLocation(), equalTo("mvn:commons-io/commons-io/1.3.5"));
         assertThat(f1.getBundles().get(1).getOriginalLocation(), equalTo("mvn:commons-io/commons-io/1.3"));
-        assertTrue(f1.getBundles().get(2).isOverriden());
+        assertTrue(f1.getBundles().get(2).isOverriden() == BundleInfo.BundleOverrideMode.MAVEN);
         assertThat(f1.getBundles().get(2).getLocation(), equalTo("mvn:commons-codec/commons-codec/1.4.2"));
         assertThat(f1.getBundles().get(2).getOriginalLocation(), equalTo("mvn:commons-codec/commons-codec/0.4"));
-        assertFalse(f1.getBundles().get(3).isOverriden());
-        assertTrue(f1.getConditional().get(0).getBundles().get(0).isOverriden());
+        assertTrue(f1.getBundles().get(3).isOverriden() == BundleInfo.BundleOverrideMode.NONE);
+        assertTrue(f1.getConditional().get(0).getBundles().get(0).isOverriden() == BundleInfo.BundleOverrideMode.OSGI);
         assertThat(f1.getConditional().get(0).getBundles().get(0).getLocation(), equalTo("mvn:org.glassfish/something-strangest/4.3.1"));
         assertThat(f1.getConditional().get(0).getBundles().get(0).getOriginalLocation(), equalTo("mvn:org.glassfish/something-strangest/4.3.0"));
-        assertFalse(f1.getConditional().get(0).getBundles().get(1).isOverriden());
+        assertTrue(f1.getConditional().get(0).getBundles().get(1).isOverriden() == BundleInfo.BundleOverrideMode.NONE);
     }
 
     @Test
diff --git a/features/core/src/test/resources/org/apache/karaf/features/internal/region/data1/d.mf b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data1/d.mf
new file mode 100644
index 0000000..2ef8fc0
--- /dev/null
+++ b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data1/d.mf
@@ -0,0 +1,6 @@
+Manifest-Version: 1
+Bundle-ManifestVersion: 2
+Bundle-SymbolicName: d
+Bundle-Version: 1.0.0
+Require-Capability: ns;filter:="(ns=c)"
+
diff --git a/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/a.mf b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/a.mf
new file mode 100644
index 0000000..20a7811
--- /dev/null
+++ b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/a.mf
@@ -0,0 +1,5 @@
+Manifest-Version: 1
+Bundle-ManifestVersion: 2
+Bundle-SymbolicName: a
+Bundle-Version: 1.0.0
+
diff --git a/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/b.mf b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/b.mf
new file mode 100644
index 0000000..dc96158
--- /dev/null
+++ b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/b.mf
@@ -0,0 +1,5 @@
+Manifest-Version: 1
+Bundle-ManifestVersion: 2
+Bundle-SymbolicName: b
+Bundle-Version: 1.0.0
+
diff --git a/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/features.xml b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/features.xml
new file mode 100644
index 0000000..03c9853
--- /dev/null
+++ b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data10/features.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements. See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License. You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+
+-->
+<features name="test" xmlns="http://karaf.apache.org/xmlns/features/v1.3.0">
+
+    <feature name="f1">
+        <bundle>a</bundle>
+        <bundle>b</bundle>
+    </feature>
+
+</features>
diff --git a/features/core/src/test/resources/org/apache/karaf/features/internal/region/data3/c.mf b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data3/c.mf
new file mode 100644
index 0000000..4fe2569
--- /dev/null
+++ b/features/core/src/test/resources/org/apache/karaf/features/internal/region/data3/c.mf
@@ -0,0 +1,5 @@
+Manifest-Version: 1
+Bundle-ManifestVersion: 2
+Bundle-SymbolicName: not-a
+Bundle-Version: 1.0.1
+
diff --git a/profile/pom.xml b/profile/pom.xml
index 118a14f..a3c9e11 100644
--- a/profile/pom.xml
+++ b/profile/pom.xml
@@ -177,18 +177,18 @@
                             org.apache.karaf.profile.command.completers,
                         </Export-Package>
                         <Private-Package>
+                            org.apache.karaf.profile.assembly,
                             org.apache.karaf.profile.command,
                             org.apache.karaf.profile.command.completers,
-                            org.apache.karaf.profile.assembly,
                             org.apache.karaf.profile.impl,
                             org.apache.karaf.profile.impl.osgi,
-                            org.apache.karaf.profile.versioning,
-                            org.apache.karaf.util,
-                            org.apache.karaf.util.config,
-                            org.apache.karaf.util.maven,
-                            org.apache.felix.utils.manifest,
-                            org.apache.felix.utils.version,
-                            org.apache.felix.utils.properties,
+                            org.apache.karaf.util;-split-package:=merge-first,
+                            org.apache.karaf.util.config;-split-package:=merge-first,
+                            org.apache.karaf.util.maven;-split-package:=merge-first,
+                            org.apache.karaf.util.tracker;-split-package:=merge-first,
+                            org.apache.felix.utils.manifest;-split-package:=merge-first,
+                            org.apache.felix.utils.properties;-split-package:=merge-first,
+                            org.apache.felix.utils.version;-split-package:=merge-first
                         </Private-Package>
                     </instructions>
                 </configuration>
diff --git a/profile/src/main/java/org/apache/karaf/profile/assembly/ArtifactInstaller.java b/profile/src/main/java/org/apache/karaf/profile/assembly/ArtifactInstaller.java
index f068f5e..f1b280f 100644
--- a/profile/src/main/java/org/apache/karaf/profile/assembly/ArtifactInstaller.java
+++ b/profile/src/main/java/org/apache/karaf/profile/assembly/ArtifactInstaller.java
@@ -48,13 +48,19 @@ public class ArtifactInstaller {
         this.downloader = downloader;
         this.blacklist = blacklist;
     }
-    
+
+    /**
+     * Installs a {@link BundleInfo} into the <code>system/</code> directory, taking the <em>blacklisted</em>
+     * and <em>overridden</em> flags into account.
+     * @param bundle the bundle to install
+     * @throws Exception if the artifact cannot be resolved or installed
+     */
     public void installArtifact(BundleInfo bundle) throws Exception {
         if (bundle.isBlacklisted()) {
             LOGGER.info("      skipping blacklisted maven artifact: " + bundle.getLocation());
             return;
         }
-        if (bundle.isOverriden()) {
+        if (bundle.isOverriden() != BundleInfo.BundleOverrideMode.NONE) {
             LOGGER.info("      adding overriden maven artifact: " + bundle.getLocation() + " (original location: " + bundle.getOriginalLocation() + ")");
         } else {
             LOGGER.info("      adding maven artifact: " + bundle.getLocation());
@@ -76,6 +82,12 @@ public class ArtifactInstaller {
         });
     }
 
+    /**
+     * Installs a generic artifact into the <code>system/</code> directory. For bundles, the dedicated
+     * {@link #installArtifact(BundleInfo)} should be used.
+     * @param location the location (URI) of the artifact to install
+     * @throws Exception if the artifact cannot be resolved or installed
+     */
     public void installArtifact(String location) throws Exception {
         LOGGER.info("      adding maven artifact: " + location);
         location = removeTrailingSlash(stripUrl(location));
diff --git a/profile/src/main/java/org/apache/karaf/profile/assembly/AssemblyDeployCallback.java b/profile/src/main/java/org/apache/karaf/profile/assembly/AssemblyDeployCallback.java
index 6e854dc..c9c3560 100644
--- a/profile/src/main/java/org/apache/karaf/profile/assembly/AssemblyDeployCallback.java
+++ b/profile/src/main/java/org/apache/karaf/profile/assembly/AssemblyDeployCallback.java
@@ -26,6 +26,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Hashtable;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
@@ -33,6 +34,7 @@ import java.util.jar.Attributes;
 import java.util.jar.JarFile;
 
 import org.apache.felix.utils.properties.Properties;
+import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.DeploymentEvent;
 import org.apache.karaf.features.FeatureEvent;
 import org.apache.karaf.features.FeaturesService;
@@ -43,8 +45,8 @@ import org.apache.karaf.features.internal.model.ConfigFile;
 import org.apache.karaf.features.internal.model.Feature;
 import org.apache.karaf.features.internal.model.Features;
 import org.apache.karaf.features.internal.model.Library;
-import org.apache.karaf.features.internal.service.Blacklist;
 import org.apache.karaf.features.internal.service.Deployer;
+import org.apache.karaf.features.internal.service.FeaturesProcessor;
 import org.apache.karaf.features.internal.service.State;
 import org.apache.karaf.features.internal.service.StaticInstallSupport;
 import org.apache.karaf.features.internal.util.MapUtils;
@@ -56,48 +58,71 @@ import org.osgi.framework.wiring.BundleRevision;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Callback through which the {@link Deployer} interacts with the distribution being assembled.
+ */
 public class AssemblyDeployCallback extends StaticInstallSupport implements Deployer.DeployCallback {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(Builder.class);
 
     private final DownloadManager manager;
     private final Builder builder;
-    private Blacklist featureBlacklist;
-    private Blacklist bundleBlacklist;
     private final Path homeDirectory;
     private final int defaultStartLevel;
     private final Path etcDirectory;
     private final Path systemDirectory;
     private final Deployer.DeploymentState dstate;
     private final AtomicLong nextBundleId = new AtomicLong(0);
+    private final FeaturesProcessor processor;
 
     private final Map<String, Bundle> bundles = new HashMap<>();
 
-    public AssemblyDeployCallback(DownloadManager manager, Builder builder, BundleRevision systemBundle, Collection<Features> repositories) {
+    /**
+     * Creates a {@link Deployer.DeployCallback} that performs actions on a runtime with only the system bundle
+     * installed and with access to all non-blacklisted features.
+     * @param manager the download manager used to fetch artifacts
+     * @param builder the assembly builder being run
+     * @param systemBundle the revision representing the system bundle (bundle 0)
+     * @param repositories the available feature repositories
+     * @param processor the features processor used to check blacklisting
+     */
+    public AssemblyDeployCallback(DownloadManager manager, Builder builder, BundleRevision systemBundle, Collection<Features> repositories,
+                                  FeaturesProcessor processor) {
         this.manager = manager;
         this.builder = builder;
-//        this.featureBlacklist = new Blacklist(builder.getBlacklistedFeatures());
-//        this.bundleBlacklist = new Blacklist(builder.getBlacklistedBundles());
         this.homeDirectory = builder.homeDirectory;
         this.etcDirectory = homeDirectory.resolve("etc");
         this.systemDirectory = homeDirectory.resolve("system");
         this.defaultStartLevel = builder.defaultStartLevel;
+        this.processor = processor;
+
         dstate = new Deployer.DeploymentState();
         dstate.bundles = new HashMap<>();
-        dstate.features = new HashMap<>();
         dstate.bundlesPerRegion = new HashMap<>();
         dstate.filtersPerRegion = new HashMap<>();
         dstate.state = new State();
 
         MapUtils.addToMapSet(dstate.bundlesPerRegion, FeaturesService.ROOT_REGION, 0l);
         dstate.bundles.put(0l, systemBundle.getBundle());
+
+        Collection<org.apache.karaf.features.Feature> features = new LinkedList<>();
         for (Features repo : repositories) {
+            if (repo.isBlacklisted()) {
+                continue;
+            }
             for (Feature f : repo.getFeature()) {
-                dstate.features.put(f.getId(), f);
+                if (!f.isBlacklisted()) {
+                    features.add(f);
+                }
             }
         }
+        dstate.partitionFeatures(features);
     }
 
+    /**
+     * Gets the startup bundles together with their start levels.
+     * @return a map from bundle location to start level
+     */
     public Map<String, Integer> getStartupBundles() {
         Map<String, Integer> startup = new HashMap<>();
         for (Map.Entry<String, Bundle> bundle : bundles.entrySet()) {
@@ -193,11 +218,11 @@ public class AssemblyDeployCallback extends StaticInstallSupport implements Depl
     }
     
     private void assertNotBlacklisted(org.apache.karaf.features.Feature feature) {
-//        if (featureBlacklist.isFeatureBlacklisted(feature.getName(), feature.getVersion())) {
-//            if (builder.getBlacklistPolicy() == Builder.BlacklistPolicy.Fail) {
-//                throw new RuntimeException("Feature " + feature.getId() + " is blacklisted");
-//            }
-//        }
+        if (feature.isBlacklisted()) {
+            if (builder.getBlacklistPolicy() == Builder.BlacklistPolicy.Fail) {
+                throw new RuntimeException("Feature " + feature.getId() + " is blacklisted");
+            }
+        }
     }
 
     @Override
@@ -211,11 +236,12 @@ public class AssemblyDeployCallback extends StaticInstallSupport implements Depl
     @Override
     public Bundle installBundle(String region, String uri, InputStream is) throws BundleException {
         // Check blacklist
-//        if (bundleBlacklist.isBundleBlacklisted(uri)) {
-//            if (builder.getBlacklistPolicy() == Builder.BlacklistPolicy.Fail) {
-//                throw new RuntimeException("Bundle " + uri + " is blacklisted");
-//            }
-//        }
+        if (processor.isBundleBlacklisted(uri)) {
+            if (builder.getBlacklistPolicy() == Builder.BlacklistPolicy.Fail) {
+                throw new RuntimeException("Bundle " + uri + " is blacklisted");
+            }
+        }
+
         // Install
         LOGGER.info("      adding maven artifact: " + uri);
         try {
@@ -226,7 +252,7 @@ public class AssemblyDeployCallback extends StaticInstallSupport implements Depl
                 path = Parser.pathFromMaven(uri);
             } else {
                 uri = uri.replaceAll("[^0-9a-zA-Z.\\-_]+", "_");
-		        if (uri.length() > 256) {
+                if (uri.length() > 256) {
                     //to avoid the File name too long exception
                     uri = uri.substring(0, 255);
                 }
@@ -261,6 +287,11 @@ public class AssemblyDeployCallback extends StaticInstallSupport implements Depl
         bundle.adapt(BundleStartLevel.class).setStartLevel(startLevel);
     }
 
+    @Override
+    public void bundleBlacklisted(BundleInfo bundleInfo) {
+        LOGGER.info("      skipping blacklisted bundle: {}", bundleInfo.getLocation());
+    }
+
     private String substFinalName(String finalname) {
         final String markerVarBeg = "${";
         final String markerVarEnd = "}";
diff --git a/profile/src/main/java/org/apache/karaf/profile/assembly/Builder.java b/profile/src/main/java/org/apache/karaf/profile/assembly/Builder.java
index 8a41a76..274d535 100644
--- a/profile/src/main/java/org/apache/karaf/profile/assembly/Builder.java
+++ b/profile/src/main/java/org/apache/karaf/profile/assembly/Builder.java
@@ -103,6 +103,7 @@ import org.ops4j.pax.url.mvn.MavenResolvers;
 import org.osgi.framework.Constants;
 import org.osgi.framework.wiring.BundleRevision;
 import org.osgi.resource.Resource;
+import org.osgi.service.repository.Repository;
 import org.osgi.service.resolver.Resolver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -1278,6 +1279,9 @@ public class Builder {
                         Path input = provider.getFile().toPath();
                         String name = filename != null ? filename : input.getFileName().toString();
                         Path libOutput = homeDirectory.resolve(path).resolve(name);
+                        if (!libOutput.toFile().getParentFile().isDirectory()) {
+                            libOutput.toFile().getParentFile().mkdirs();
+                        }
                         LOGGER.info("{}   adding library: {}", indent, homeDirectory.relativize(libOutput));
                         Files.copy(input, libOutput, StandardCopyOption.REPLACE_EXISTING);
                         if (provider.getUrl().startsWith("mvn:")) {
@@ -1342,7 +1346,7 @@ public class Builder {
         Downloader downloader = manager.createDownloader();
 
         // Load startup repositories
-        LOGGER.info("   Loading repositories");
+        LOGGER.info("   Loading installed repositories");
         Map<String, Features> installedRepositories = loadRepositories(manager, installedEffective.getRepositories(), true, processor);
         // Compute startup feature dependencies
         Set<Feature> allInstalledFeatures = new HashSet<>();
@@ -1388,7 +1392,7 @@ public class Builder {
         Profile bootOverlay = Profiles.getOverlay(bootProfile, allProfiles, environment);
         Profile bootEffective = Profiles.getEffective(bootOverlay, false);
         // Load startup repositories
-        LOGGER.info("   Loading repositories");
+        LOGGER.info("   Loading boot repositories");
         Map<String, Features> bootRepositories = loadRepositories(manager, bootEffective.getRepositories(), true, processor);
         // Compute startup feature dependencies
         Set<Feature> allBootFeatures = new HashSet<>();
@@ -1605,21 +1609,24 @@ public class Builder {
         Profile startupOverlay = Profiles.getOverlay(startupProfile, allProfiles, environment);
         Profile startupEffective = Profiles.getEffective(startupOverlay, false);
         // Load startup repositories
-        LOGGER.info("   Loading repositories");
+        LOGGER.info("   Loading startup repositories");
         Map<String, Features> startupRepositories = loadRepositories(manager, startupEffective.getRepositories(), false, processor);
 
         //
         // Resolve
         //
-        LOGGER.info("   Resolving features");
+        LOGGER.info("   Resolving startup features and bundles");
+        LOGGER.info("      Features: " + startupEffective.getFeatures().stream().collect(Collectors.joining(", ")));
+        LOGGER.info("      Bundles: " + startupEffective.getBundles().stream().collect(Collectors.joining(", ")));
+
         Map<String, Integer> bundles =
                 resolve(manager,
                         resolver,
                         startupRepositories.values(),
                         startupEffective.getFeatures(),
                         startupEffective.getBundles(),
-                        startupEffective.getOverrides(),
-                        startupEffective.getOptionals());
+                        startupEffective.getOptionals(),
+                        processor);
 
         //
         // Generate startup.properties
@@ -1709,10 +1716,9 @@ public class Builder {
                             try (InputStream is = provider.open()) {
                                 Features featuresModel = JaxbUtil.unmarshal(url, is, false);
                                 // always process according to processor configuration
+                                featuresModel.setBlacklisted(processor.isRepositoryBlacklisted(url));
                                 processor.process(featuresModel);
-                                // TODO consult blacklist policy
-//                                if (blacklistPolicy == BlacklistPolicy.Discard) {
-//                                }
+
                                 loaded.put(provider.getUrl(), featuresModel);
                                 for (String innerRepository : featuresModel.getRepository()) {
                                     downloader.download(innerRepository, this);
@@ -1754,59 +1760,85 @@ public class Builder {
                 .getProfile();
     }
 
+    /**
+     * <p>Resolves a set of features and bundles using the OSGi resolver to calculate the startup stage bundles.</p>
+     * <p>Startup stage means that the <em>current</em> state of the OSGi framework is just the system bundle,
+     * and bundles and features are resolved against this single <em>bundle 0</em>.</p>
+     *
+     * @param manager {@link DownloadManager} used to download bundles and resources
+     * @param resolver OSGi resolver that resolves features and bundles in a framework with only the system bundle installed
+     * @param repositories all available (not only to-be-installed) features
+     * @param features feature identifiers to resolve
+     * @param bundles bundle locations to resolve
+     * @param optionals optional URI locations that will be made available through a {@link org.osgi.service.repository.Repository}
+     * used during the resolution process
+     * @param processor {@link FeaturesProcessor} used to process repositories/features/bundles
+     * @return map from bundle URI to bundle start level
+     * @throws Exception if download or resolution fails
+     */
     private Map<String, Integer> resolve(
                     DownloadManager manager,
                     Resolver resolver,
                     Collection<Features> repositories,
                     Collection<String> features,
                     Collection<String> bundles,
-                    Collection<String> overrides,
-                    Collection<String> optionals) throws Exception {
+                    Collection<String> optionals,
+                    FeaturesProcessor processor) throws Exception {
+
+        // The system bundle will be the only bundle installed, with bundleId == 0
         BundleRevision systemBundle = getSystemBundle();
-        AssemblyDeployCallback callback = new AssemblyDeployCallback(manager, this, systemBundle, repositories);
+        // Static distribution building callback and deployer that's used to deploy/collect startup-stage artifacts
+        AssemblyDeployCallback callback = new AssemblyDeployCallback(manager, this, systemBundle, repositories, processor);
         Deployer deployer = new Deployer(manager, resolver, callback);
 
         // Install framework
         Deployer.DeploymentRequest request = Deployer.DeploymentRequest.defaultDeploymentRequest();
-        // Add overrides
-        request.overrides.addAll(overrides);
-        // Add optional resources
-        final List<Resource> resources = new ArrayList<>();
-        Downloader downloader = manager.createDownloader();
-        for (String optional : optionals) {
-            downloader.download(optional, provider -> {
-                    Resource resource = ResourceBuilder.build(provider.getUrl(), getHeaders(provider));
-                    synchronized (resources) {
-                        resources.add(resource);
-                    }
-            });
-        }
-        downloader.await();
-        request.globalRepository = new BaseRepository(resources);
-        // Install features
+
+        // Add optional resources available through OSGi resource repository
+        request.globalRepository = repositoryOfOptionalResources(manager, optionals);
+
+        // Specify feature requirements (already prefixed with "feature:")
         for (String feature : features) {
             MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, feature);
         }
+        // Specify bundle requirements
         for (String bundle : bundles) {
             MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, "bundle:" + bundle);
         }
-        Set<String> prereqs = new HashSet<>();
-        while (true) {
-            try {
-                deployer.deploy(callback.getDeploymentState(), request);
-                break;
-            } catch (Deployer.PartialDeploymentException e) {
-                if (!prereqs.containsAll(e.getMissing())) {
-                    prereqs.addAll(e.getMissing());
-                } else {
-                    throw new Exception("Deployment aborted due to loop in missing prerequisites: " + e.getMissing());
-                }
-            }
-        }
+
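+        // deployFully() takes over the explicit retry loop that previously handled
+        // Deployer.PartialDeploymentException for missing prerequisites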
+        deployer.deployFully(callback.getDeploymentState(), request);
 
         return callback.getStartupBundles();
     }
 
+    /**
+     * Makes optional resource URIs available through an OSGi {@link Repository} used during resolution.
+     * @param manager {@link DownloadManager} used to download the optional resources
+     * @param optionals optional resource URIs to download
+     * @return {@link Repository} backed by the downloaded resources
+     * @throws Exception if an optional resource can't be downloaded
+     */
+    private Repository repositoryOfOptionalResources(DownloadManager manager, Collection<String> optionals)
+            throws Exception {
+        final List<Resource> resources = new ArrayList<>();
+        Downloader downloader = manager.createDownloader();
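+        // Download each optional resource and build an OSGi Resource from its manifest headers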
+        for (String optional : optionals) {
+            downloader.download(optional, provider -> {
+                Resource resource = ResourceBuilder.build(provider.getUrl(), getHeaders(provider));
+                synchronized (resources) {
+                    resources.add(resource);
+                }
+            });
+        }
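+        // Wait for all downloads to finish before exposing the resources as a repository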
+        downloader.await();
+        return new BaseRepository(resources);
+    }
+
+    /**
+     * Prepares a {@link BundleRevision} that represents the system bundle (a.k.a. <em>bundle 0</em>).
+     * @return {@link BundleRevision} for the system bundle
+     * @throws Exception if the system bundle revision can't be constructed
+     */
     @SuppressWarnings("rawtypes")
     private BundleRevision getSystemBundle() throws Exception {
         Path configPropPath = etcDirectory.resolve("config.properties");
diff --git a/profile/src/main/java/org/apache/karaf/profile/assembly/ConfigInstaller.java b/profile/src/main/java/org/apache/karaf/profile/assembly/ConfigInstaller.java
index 13943a2..de40401 100644
--- a/profile/src/main/java/org/apache/karaf/profile/assembly/ConfigInstaller.java
+++ b/profile/src/main/java/org/apache/karaf/profile/assembly/ConfigInstaller.java
@@ -16,8 +16,6 @@
  */
 package org.apache.karaf.profile.assembly;
 
-import java.io.IOException;
-import java.net.MalformedURLException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
@@ -33,6 +31,9 @@ import org.apache.karaf.features.internal.model.Feature;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Installs PID configurations into <code>${karaf.etc}</code> and the <code>system/</code> directory.
+ */
 public class ConfigInstaller {
     private static final Logger LOGGER = LoggerFactory.getLogger(ConfigInstaller.class);
     private Path etcDirectory;
@@ -41,11 +42,10 @@ public class ConfigInstaller {
     public ConfigInstaller(Path etcDirectory, List<String> pidsToExtract) {
         this.etcDirectory = etcDirectory;
         this.pidsToExtract = pidsToExtract;
-        // TODO Auto-generated constructor stub
     }
 
     public void installConfigs(Feature feature, Downloader downloader, ArtifactInstaller installer)
-        throws Exception, MalformedURLException, IOException {
+        throws Exception {
         List<Content> contents = new ArrayList<>();
         contents.add(feature);
         contents.addAll(feature.getConditional());
diff --git a/profile/src/main/java/org/apache/karaf/profile/impl/ProfileBuilderImpl.java b/profile/src/main/java/org/apache/karaf/profile/impl/ProfileBuilderImpl.java
index 24dbe98..9ea1722 100644
--- a/profile/src/main/java/org/apache/karaf/profile/impl/ProfileBuilderImpl.java
+++ b/profile/src/main/java/org/apache/karaf/profile/impl/ProfileBuilderImpl.java
@@ -310,7 +310,7 @@ public final class ProfileBuilderImpl implements ProfileBuilder {
      * @return
      */
     private byte[] reformat(String name, FileContent fileContent) {
-        if (!fileContent.generated && !(isOverlay && name.equals(INTERNAL_PID + PROPERTIES_SUFFIX))) {
+        if (!fileContent.generated || !(isOverlay && name.equals(INTERNAL_PID + PROPERTIES_SUFFIX))) {
             return fileContent.bytes;
         }
 
diff --git a/profile/src/test/java/org/apache/karaf/profile/impl/ProfilesTest.java b/profile/src/test/java/org/apache/karaf/profile/impl/ProfilesTest.java
index 5a1e886..6369e38 100644
--- a/profile/src/test/java/org/apache/karaf/profile/impl/ProfilesTest.java
+++ b/profile/src/test/java/org/apache/karaf/profile/impl/ProfilesTest.java
@@ -89,7 +89,7 @@ public class ProfilesTest {
     }
 
     @Test(expected = IllegalArgumentException.class)
-    public void testProfilePlaceholderResolverWitCycle() {
+    public void testProfilePlaceholderResolverWithCycle() {
         Profile profile = ProfileBuilder.Factory.create("test")
                 .addConfiguration("pid1", "foo", "b${profile:pid2/bar}")
                 .addConfiguration("pid2", "bar", "a${rep}")
@@ -212,12 +212,13 @@ public class ProfilesTest {
         parents.put("p1", p1);
         parents.put("p2", p2);
 
-        assertThat(Profiles.getOverlay(c, parents).getAttributes().get("a"), equalTo("5"));
-        assertThat(Profiles.getOverlay(c, parents).getAttributes().get("b"), equalTo("4"));
-        assertThat(Profiles.getOverlay(c, parents).getAttributes().get("c"), equalTo("2"));
-        assertThat(Profiles.getOverlay(c, parents).getConfiguration("p").get("p"), equalTo("5"));
-        assertThat(Profiles.getOverlay(c, parents).getConfiguration("p").get("px"), equalTo("1"));
-        assertThat(Profiles.getOverlay(c, parents).getFileConfiguration("f"), equalTo(new byte[] { 0x05 }));
+        Profile overlay = Profiles.getOverlay(c, parents);
+        assertThat(overlay.getAttributes().get("a"), equalTo("5"));
+        assertThat(overlay.getAttributes().get("b"), equalTo("4"));
+        assertThat(overlay.getAttributes().get("c"), equalTo("2"));
+        assertThat(overlay.getConfiguration("p").get("p"), equalTo("5"));
+        assertThat(overlay.getConfiguration("p").get("px"), equalTo("1"));
+        assertThat(overlay.getFileConfiguration("f"), equalTo(new byte[] { 0x05 }));
     }
 
     @Test
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/VerifyMojo.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/VerifyMojo.java
index 3aca9d3..fce9e55 100644
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/VerifyMojo.java
+++ b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/VerifyMojo.java
@@ -31,7 +31,6 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.EnumSet;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -39,6 +38,7 @@ import java.util.Hashtable;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -59,6 +59,7 @@ import org.apache.felix.resolver.Logger;
 import org.apache.felix.resolver.ResolverImpl;
 import org.apache.felix.utils.version.VersionRange;
 import org.apache.felix.utils.version.VersionTable;
+import org.apache.karaf.features.BundleInfo;
 import org.apache.karaf.features.DeploymentEvent;
 import org.apache.karaf.features.FeatureEvent;
 import org.apache.karaf.features.FeaturesService;
@@ -86,7 +87,6 @@ import org.apache.karaf.tooling.utils.ReactorMavenResolver;
 import org.apache.karaf.util.config.PropertiesLoader;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.plugin.MojoExecutionException;
-import org.apache.maven.plugin.MojoFailureException;
 import org.apache.maven.plugins.annotations.Component;
 import org.apache.maven.plugins.annotations.Mojo;
 import org.apache.maven.plugins.annotations.Parameter;
@@ -98,7 +98,6 @@ import org.ops4j.pax.url.mvn.MavenResolvers;
 import org.osgi.framework.Bundle;
 import org.osgi.framework.BundleException;
 import org.osgi.framework.Constants;
-import org.osgi.framework.InvalidSyntaxException;
 import org.osgi.framework.Version;
 import org.osgi.framework.namespace.IdentityNamespace;
 import org.osgi.framework.startlevel.BundleStartLevel;
@@ -162,7 +161,7 @@ public class VerifyMojo extends MojoSupport {
     protected MavenResolver resolver;
 
     @Override
-    public void execute() throws MojoExecutionException, MojoFailureException {
+    public void execute() throws MojoExecutionException {
         if (skip) {
             return;
         }
@@ -218,7 +217,7 @@ public class VerifyMojo extends MojoSupport {
         }
     }
 
-    protected void doExecute() throws MojoExecutionException, MojoFailureException {
+    protected void doExecute() throws MojoExecutionException {
         System.setProperty("karaf.home", "target/karaf");
         System.setProperty("karaf.data", "target/karaf/data");
 
@@ -431,7 +430,6 @@ public class VerifyMojo extends MojoSupport {
                 throw new MojoExecutionException("Unable to resolve framework features", e);
             }
 
-
             /*
             boolean resolveOptionalImports = getResolveOptionalImports(properties);
 
@@ -456,25 +454,13 @@ public class VerifyMojo extends MojoSupport {
             }
             */
 
-
             // Install features
             for (String feature : features) {
                 MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, feature);
             }
             try {
-                Set<String> prereqs = new HashSet<>();
-                while (true) {
-                    try {
-                        deployer.deploy(callback.getDeploymentState(), request);
-                        break;
-                    } catch (Deployer.PartialDeploymentException e) {
-                        if (!prereqs.containsAll(e.getMissing())) {
-                            prereqs.addAll(e.getMissing());
-                        } else {
-                            throw new Exception("Deployment aborted due to loop in missing prerequisites: " + e.getMissing());
-                        }
-                    }
-                }
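+                // deployFully() replaces the prerequisite retry loop removed above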
+                deployer.deployFully(callback.getDeploymentState(), request);
+
                 // TODO: find unused resources ?
             } catch (Exception e) {
                 throw new MojoExecutionException("Feature resolution failed for " + features
@@ -482,8 +468,6 @@ public class VerifyMojo extends MojoSupport {
                         + "\nRepositories: " + toString(new TreeSet<>(repositories.keySet()))
                         + "\nResources: " + toString(new TreeSet<>(manager.getProviders().keySet())), e);
             }
-
-
         } catch (MojoExecutionException e) {
             throw e;
         } catch (Exception e) {
@@ -641,7 +625,7 @@ public class VerifyMojo extends MojoSupport {
                     new Class[] { Bundle.class },
                     new InvocationHandler() {
                         @Override
-                        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+                        public Object invoke(Object proxy, Method method, Object[] args) {
                             if (method.getName().equals("hashCode")) {
                                 return FakeBundleRevision.this.hashCode();
                             } else if (method.getName().equals("equals")) {
@@ -741,22 +725,29 @@ public class VerifyMojo extends MojoSupport {
         private final Deployer.DeploymentState dstate;
         private final AtomicLong nextBundleId = new AtomicLong(0);
 
-        public DummyDeployCallback(Bundle sysBundle, Collection<Features> repositories) throws Exception {
+        public DummyDeployCallback(Bundle sysBundle, Collection<Features> repositories) {
             systemBundle = sysBundle;
             dstate = new Deployer.DeploymentState();
             dstate.bundles = new HashMap<>();
-            dstate.features = new HashMap<>();
             dstate.bundlesPerRegion = new HashMap<>();
             dstate.filtersPerRegion = new HashMap<>();
             dstate.state = new State();
 
             MapUtils.addToMapSet(dstate.bundlesPerRegion, FeaturesService.ROOT_REGION, 0l);
             dstate.bundles.put(0l, systemBundle);
+
+            Collection<org.apache.karaf.features.Feature> features = new LinkedList<>();
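+            // Collect only non-blacklisted features from non-blacklisted repositories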
             for (Features repo : repositories) {
+                if (repo.isBlacklisted()) {
+                    continue;
+                }
                 for (Feature f : repo.getFeature()) {
-                    dstate.features.put(f.getId(), f);
+                    if (!f.isBlacklisted()) {
+                        features.add(f);
+                    }
                 }
             }
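+            // Register the collected features with the deployment state (replaces the former per-id features map)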
+            dstate.partitionFeatures(features);
         }
 
         public Deployer.DeploymentState getDeploymentState() {
@@ -769,15 +760,15 @@ public class VerifyMojo extends MojoSupport {
         }
 
         @Override
-        public void persistResolveRequest(Deployer.DeploymentRequest request) throws IOException {
+        public void persistResolveRequest(Deployer.DeploymentRequest request) {
         }
 
         @Override
-        public void installConfigs(org.apache.karaf.features.Feature feature) throws IOException, InvalidSyntaxException {
+        public void installConfigs(org.apache.karaf.features.Feature feature) {
         }
         
         @Override
-        public void installLibraries(org.apache.karaf.features.Feature feature) throws IOException {
+        public void installLibraries(org.apache.karaf.features.Feature feature) {
         }
 
         @Override
@@ -812,6 +803,11 @@ public class VerifyMojo extends MojoSupport {
             }
         }
 
+        @Override
+        public void bundleBlacklisted(BundleInfo bundleInfo) {
+
+        }
+
     }
 
     public class MavenResolverLog extends org.apache.felix.resolver.Logger {
