Posted to commits@karaf.apache.org by gn...@apache.org on 2015/04/17 11:34:55 UTC

[3/5] karaf git commit: [KARAF-3671] Clean up karaf-maven-plugin goals

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/ValidateDescriptorMojo.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/ValidateDescriptorMojo.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/ValidateDescriptorMojo.java
deleted file mode 100644
index d87597c..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/ValidateDescriptorMojo.java
+++ /dev/null
@@ -1,665 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.karaf.tooling.features;
-
-import aQute.bnd.header.OSGiHeader;
-import org.apache.felix.utils.manifest.Clause;
-import org.apache.karaf.features.BundleInfo;
-import org.apache.karaf.features.Dependency;
-import org.apache.karaf.features.Feature;
-import org.apache.karaf.features.Repository;
-import org.apache.karaf.features.internal.service.FeatureValidationUtil;
-import org.apache.karaf.features.internal.service.RepositoryImpl;
-import org.apache.karaf.tooling.url.CustomBundleURLStreamHandlerFactory;
-import org.apache.karaf.tooling.utils.ManifestUtils;
-import org.apache.karaf.tooling.utils.MojoSupport;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.DefaultArtifactRepository;
-import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
-import org.apache.maven.artifact.resolver.ArtifactCollector;
-import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
-import org.apache.maven.artifact.resolver.ArtifactResolutionException;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.plugin.MojoExecutionException;
-import org.apache.maven.plugin.MojoFailureException;
-import org.apache.maven.plugins.annotations.Component;
-import org.apache.maven.plugins.annotations.LifecyclePhase;
-import org.apache.maven.plugins.annotations.Mojo;
-import org.apache.maven.plugins.annotations.Parameter;
-import org.apache.maven.plugins.annotations.ResolutionScope;
-import org.apache.maven.shared.dependency.tree.DependencyNode;
-import org.apache.maven.shared.dependency.tree.DependencyTreeBuilder;
-import org.apache.maven.shared.dependency.tree.traversal.DependencyNodeVisitor;
-
-import java.io.*;
-import java.net.URI;
-import java.net.URL;
-import java.util.*;
-import java.util.jar.JarInputStream;
-import java.util.jar.Manifest;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
-
-import static org.apache.karaf.tooling.utils.ManifestUtils.*;
-
-/**
- * Validates a features XML file
- */
-@Mojo(name = "features-validate-descriptor", defaultPhase = LifecyclePhase.PROCESS_RESOURCES, requiresDependencyResolution = ResolutionScope.RUNTIME)
-@SuppressWarnings("deprecation")
-public class ValidateDescriptorMojo extends MojoSupport {
-
-    private static final String MVN_URI_PREFIX = "mvn:";
-    private static final String MVN_REPO_SEPARATOR = "!";
-
-    private static final String KARAF_CORE_STANDARD_FEATURE_URL = "mvn:org.apache.karaf.features/standard/%s/xml/features";
-    private static final String KARAF_CORE_ENTERPRISE_FEATURE_URL = "mvn:org.apache.karaf.features/enterprise/%s/xml/features";
-
-    private static boolean isCustomStreamURLHandlerSet;
-    /**
-     * The dependency tree builder to use.
-     */
-    @Component
-    private DependencyTreeBuilder dependencyTreeBuilder;
-
-    /**
-     * The ArtifactCollector provided by Maven at runtime
-     */
-    @Component
-    private ArtifactCollector collector;
-
-    /**
-     * The features XML file to validate
-     */
-    @Parameter(defaultValue="${project.build.directory}/classes/features.xml")
-    private File file;
-
-    /**
-     * Karaf config.properties
-     */
-    @Parameter(defaultValue="config.properties")
-    private String karafConfig;
-
-    /**
-     * which JRE version to parse from config.properties to get the JRE exported packages
-     */
-    @Parameter(defaultValue = "jre-1.5")
-    private String jreVersion;
-
-    /**
-     * the Karaf version used for Karaf core features resolution
-     */
-    @Parameter
-    private String karafVersion;
-
-    /**
-     * The repositories which are included from the plugin config
-     */
-    @Parameter
-    private List<String> repositories;
-
-    /**
-     * whether to skip bundle URLs that do not use the Maven protocol
-     */
-    @Parameter
-    private boolean skipNonMavenProtocols = false;
-
-    /*
-     * A map caching mvn: URIs against the artifacts they resolve to (for the mvn protocol),
-     * or against the URIs themselves (for non-mvn protocols)
-     */
-    private Map<String, Object> bundles = new HashMap<String, Object>();
-
-    /*
-     * A map to cache manifests that have been extracted from the bundles
-     */
-    private Map<Object, Manifest> manifests = new HashMap<Object, Manifest>();
-
-    /*
-     * The list of features, includes both the features to be validated and the features from included <repository>s
-     */
-    private Features features = new Features();
-
-    /*
-     * The packages exported by the features themselves -- useful when features depend on other features
-     */
-    private Map<String, Set<Clause>> featureExports = new HashMap<String, Set<Clause>>();
-
-    /*
-     * The set of packages exported by the system bundle and by Karaf itself
-     */
-    private Set<String> systemExports = new HashSet<String>();
-
-    /**
-     * The Mojo's main method
-     */
-    public void execute() throws MojoExecutionException, MojoFailureException {
-        try {
-            prepare();
-            URI uri = file.toURI();
-            Repository repository = new RepositoryImpl(uri);
-            schemaCheck(repository, uri);
-            analyze(repository);
-            validate(repository);
-        } catch (Exception e) {
-            throw new MojoExecutionException(String.format("Unable to validate %s: %s", file.getAbsolutePath(), e.getMessage()), e);
-        }
-
-    }
-
-    /**
-     * Checks feature repository with XML Schema.
-     *
-     * @param repository Repository object.
-     * @param uri        Display URI.
-     */
-    private void schemaCheck(Repository repository, URI uri) {
-        try {
-            info(" - validation of %s", uri);
-            FeatureValidationUtil.validate(repository.getURI());
-        } catch (Exception e) {
-            error("Failed to validate repository %s. Schema validation fails. Fix errors to continue validation",
-                    e, uri);
-        }
-    }
-
-    /*
-     * Prepare for validation by determining system and Karaf exports
-     */
-    private void prepare() throws Exception {
-        info("== Preparing for validation ==");
-        if (!isCustomStreamURLHandlerSet) {
-            //URL.setURLStreamHandlerFactory can be called at most once in a given Java Virtual
-            //Machine, so set a flag to avoid calling this method multiple times
-            URL.setURLStreamHandlerFactory(new CustomBundleURLStreamHandlerFactory());
-            isCustomStreamURLHandlerSet = true;
-        }
-        info(" - getting list of system bundle exports");
-        readSystemPackages();
-        info(" - getting list of provided bundle exports");
-        readProvidedBundles();
-        info(" - populating repositories with Karaf core features descriptors");
-        appendKarafCoreFeaturesDescriptors();
-    }
-
-    /**
-     * Add Karaf core features URL in the default repositories set
-     */
-    private void appendKarafCoreFeaturesDescriptors() {
-        if (repositories == null) {
-            repositories = new ArrayList<String>();
-        }
-        if (karafVersion == null) {
-            Package p = Package.getPackage("org.apache.karaf.tooling.features");
-            karafVersion = p.getImplementationVersion();
-        }
-        String karafCoreStandardFeaturesUrl = String.format(KARAF_CORE_STANDARD_FEATURE_URL, karafVersion);
-        String karafCoreEnterpriseFeaturesUrl = String.format(KARAF_CORE_ENTERPRISE_FEATURE_URL, karafVersion);
-
-        try {
-            resolve(karafCoreStandardFeaturesUrl);
-            repositories.add(karafCoreStandardFeaturesUrl);
-        } catch (Exception e) {
-            warn("Can't add " + karafCoreStandardFeaturesUrl + " in the default repositories set");
-        }
-
-        try {
-            resolve(karafCoreEnterpriseFeaturesUrl);
-            repositories.add(karafCoreEnterpriseFeaturesUrl);
-        } catch (Exception e) {
-            warn("Can't add " + karafCoreEnterpriseFeaturesUrl + " in the default repositories set");
-        }
-
-    }
-
-    /*
-     * Analyse the descriptor and any <repository>s that might be part of it
-     */
-    private void analyze(Repository repository) throws Exception {
-        info("== Analyzing feature descriptor ==");
-        info(" - read %s", file.getAbsolutePath());
-
-        features.add(repository.getFeatures());
-
-        // add the repositories from the plugin configuration
-        if (repositories != null) {
-            for (String uri : repositories) {
-                getLog().info(String.format(" - adding repository from %s", uri));
-                Repository dependency = new RepositoryImpl(URI.create(translateFromMaven(uri)));
-                schemaCheck(dependency, URI.create(uri));
-                features.add(dependency.getFeatures());
-                validateBundlesAvailable(dependency);
-                analyzeExports(dependency);
-            }
-        }
-
-        for (URI uri : repository.getRepositories()) {
-            Artifact artifact = (Artifact) resolve(uri.toString());
-            Repository dependency = new RepositoryImpl(new File(localRepo.getBasedir(), localRepo.pathOf(artifact)).toURI());
-
-            schemaCheck(dependency, uri);
-            getLog().info(String.format(" - adding %d known features from %s", dependency.getFeatures().length, uri));
-            features.add(dependency.getFeatures());
-            // we need to do this to get all the information ready for further processing
-            validateBundlesAvailable(dependency);
-            analyzeExports(dependency);
-        }
-
-    }
-
-    /*
-     * Perform the actual validation
-     */
-    private void validate(Repository repository) throws Exception {
-        info("== Validating feature descriptor ==");
-        info(" - validating %d features", repository.getFeatures().length);
-        info(" - step 1: Checking if all artifacts exist");
-        validateBundlesAvailable(repository);
-        info("    OK: all %d OSGi bundles have been found", bundles.size());
-        info(" - step 2: Checking if all imports for bundles can be resolved");
-        validateImportsExports(repository);
-        info("== Done! ==========================");
-    }
-
-
-    /*
-     * Determine list of exports by bundles that have been marked provided in the pom
-     * //TODO: we probably want to figure this out somewhere from the Karaf build itself instead of putting the burden on the user
-     */
-    private void readProvidedBundles() throws Exception {
-        DependencyNode tree = dependencyTreeBuilder.buildDependencyTree(project, localRepo, factory, artifactMetadataSource, new ArtifactFilter() {
-
-            public boolean include(Artifact artifact) {
-                return true;
-            }
-
-        }, collector);
-        tree.accept(new DependencyNodeVisitor() {
-            public boolean endVisit(DependencyNode node) {
-                // we want the next sibling too
-                return true;
-            }
-
-            public boolean visit(DependencyNode node) {
-                if (node.getState() != DependencyNode.OMITTED_FOR_CONFLICT) {
-                    Artifact artifact = node.getArtifact();
-                    info("    scanning %s for exports", artifact);
-                    if (Artifact.SCOPE_PROVIDED.equals(artifact.getScope()) && !artifact.getType().equals("pom")) {
-                        try {
-                            for (Clause clause : ManifestUtils.getExports(getManifest("", artifact))) {
-                                getLog().debug(" adding " + clause.getName() + " to list of available packages");
-                                systemExports.add(clause.getName());
-                            }
-                        } catch (ArtifactResolutionException e) {
-                            error("Unable to find bundle exports for %s: %s", e, artifact, e.getMessage());
-                        } catch (ArtifactNotFoundException e) {
-                            error("Unable to find bundle exports for %s: %s", e, artifact, e.getMessage());
-                        } catch (IOException e) {
-                            error("Unable to find bundle exports for %s: %s", e, artifact, e.getMessage());
-                        }
-                    }
-                }
-                // we want the children too
-                return true;
-            }
-        });
-    }
-
-    /*
-     * Read system packages from a properties file
-     * //TODO: we should probably grab this file from the Karaf distro itself instead of duplicating it in the plugin
-     */
-    private void readSystemPackages() throws IOException {
-        Properties properties = new Properties();
-        if (karafConfig.equals("config.properties")) {
-            properties.load(getClass().getClassLoader().getResourceAsStream("config.properties"));
-        } else {
-            properties.load(new FileInputStream(new File(karafConfig)));
-        }
-
-        String packages = (String) properties.get(jreVersion);
-        systemExports.addAll(OSGiHeader.parseHeader(packages).keySet());
-    }
-
-    /*
-     * Analyze exports in all features in the repository without validating the features
-     * (e.g. used for <repository> elements found in a descriptor)
-     */
-    private void analyzeExports(Repository repository) throws Exception {
-        for (Feature feature : repository.getFeatures()) {
-            info("    scanning feature %s for exports", feature.getName());
-            Set<Clause> exports = new HashSet<Clause>();
-            for (String bundle : getBundleLocations(feature)) {
-                exports.addAll(getExports(getManifest(bundle, bundles.get(bundle))));
-            }
-            // add the dependency feature exports
-            exports.addAll(getDependencyFeatureExports(feature));
-            featureExports.put(feature.getName(), exports);
-        }
-    }
-
-    /*
-     * Check if all the bundles can be downloaded and are actually OSGi bundles and not plain JARs
-     */
-    private void validateBundlesAvailable(Repository repository) throws Exception {
-        for (Feature feature : repository.getFeatures()) {
-            for (String bundle : getBundleLocations(feature)) {
-                if (!isMavenProtocol(bundle) && skipNonMavenProtocols) {
-                    continue;
-                }
-                // this will throw an exception if the artifact can not be resolved
-                final Object artifact = resolve(bundle);
-                bundles.put(bundle, artifact);
-                if (isBundle(bundle, artifact)) {
-                    manifests.put(artifact, getManifest(bundle, artifact));
-                } else {
-                    throw new Exception(String.format("%s is not an OSGi bundle", bundle));
-                }
-            }
-        }
-    }
-
-    /*
-     * Get a list of bundle locations in a feature
-     */
-    private List<String> getBundleLocations(Feature feature) {
-        List<String> result = new LinkedList<String>();
-        if (feature != null && feature.getBundles() != null) {
-            for (BundleInfo bundle : feature.getBundles()) {
-                result.add(bundle.getLocation());
-            }
-        }
-        return result;
-    }
-
-    /*
-     * Validate if all features in a repository have bundles which can be resolved
-     */
-    private void validateImportsExports(Repository repository) throws ArtifactResolutionException, ArtifactNotFoundException, Exception {
-        for (Feature feature : repository.getFeatures()) {
-            // make sure the feature hasn't been validated before as a dependency
-            if (!featureExports.containsKey(feature.getName())) {
-                validateImportsExports(feature);
-            }
-        }
-    }
-
-    private Set<Clause> getDependencyFeatureExports(Feature feature) throws Exception {
-        Set<Clause> exports = new HashSet<Clause>();
-
-        for (Dependency dependency : feature.getDependencies()) {
-            if (featureExports.containsKey(dependency.getName())) {
-                exports.addAll(featureExports.get(dependency.getName()));
-            } else {
-                validateImportsExports(features.get(dependency.getName(), dependency.getVersion()));
-                exports.addAll(featureExports.get(dependency.getName()));
-            }
-            exports.addAll(getDependencyFeatureExports(features.get(dependency.getName(), dependency.getVersion())));
-        }
-
-        // add the export of the feature
-        for (String bundle : getBundleLocations(feature)) {
-            Manifest meta = manifests.get(bundles.get(bundle));
-            exports.addAll(getExports(meta));
-        }
-        return exports;
-    }
-
-    /*
-     * Validate if all imports for a feature are being matched with exports
-     */
-    private void validateImportsExports(Feature feature) throws Exception {
-        Map<Clause, String> imports = new HashMap<Clause, String>();
-        Set<Clause> exports = new HashSet<Clause>();
-        for (Dependency dependency : feature.getDependencies()) {
-            if (!featureExports.containsKey(dependency.getName())) {
-                validateImportsExports(features.get(dependency.getName(), dependency.getVersion()));
-            }
-        }
-        // add the exports for dependency feature
-        exports.addAll(getDependencyFeatureExports(feature));
-        for (String bundle : getBundleLocations(feature)) {
-            Manifest meta = manifests.get(bundles.get(bundle));
-            exports.addAll(getExports(meta));
-            for (Clause clause : getMandatoryImports(meta)) {
-                imports.put(clause, bundle);
-            }
-        }
-
-        // setting up the set of required imports
-        Set<Clause> requirements = new HashSet<Clause>();
-        requirements.addAll(imports.keySet());
-
-        // now, let's remove requirements whenever we find a matching export for them
-        for (Clause element : imports.keySet()) {
-            if (systemExports.contains(element.getName())) {
-                debug("%s is resolved by a system bundle export or provided bundle", element);
-                requirements.remove(element);
-                continue;
-            }
-            for (Clause export : exports) {
-                if (matches(element, export)) {
-                    debug("%s is resolved by export %s", element, export);
-                    requirements.remove(element);
-                    continue;
-                }
-                debug("%s is not resolved by export %s", element, export);
-            }
-        }
-
-        // if there are any more requirements left here, there's a problem with the feature 
-        if (!requirements.isEmpty()) {
-            warn("Failed to validate feature %s", feature.getName());
-            for (Clause entry : requirements) {
-                warn("No export found to match %s (imported by %s)",
-                        entry, imports.get(entry));
-            }
-            throw new Exception(String.format("%d unresolved imports in feature %s",
-                    requirements.size(), feature.getName()));
-        }
-        info("    OK: imports resolved for %s", feature.getName());
-        featureExports.put(feature.getName(), exports);
-    }
-
-    /*
-    * Check if the artifact is an OSGi bundle
-    */
-    private boolean isBundle(String bundle, Object artifact) {
-        if (artifact instanceof Artifact && "bundle".equals(((Artifact) artifact).getArtifactHandler().getPackaging())) {
-            return true;
-        } else {
-            try {
-                return ManifestUtils.isBundle(getManifest(bundle, artifact));
-            } catch (Exception e) {
-                getLog().debug("Unable to determine if " + artifact + " is a bundle; defaulting to false", e);
-            }
-        }
-        return false;
-    }
-
-    /*
-     * Extract the META-INF/MANIFEST.MF file from an artifact
-     */
-    private Manifest getManifest(String bundle, Object artifact) throws ArtifactResolutionException, ArtifactNotFoundException,
-            IOException {
-        if (!(artifact instanceof Artifact)) {
-            //not resolved as mvn artifact, so it's non-mvn protocol, just use the CustomBundleURLStreamHandlerFactory
-            // to open stream
-            try (
-                InputStream is = new BufferedInputStream(new URL(bundle).openStream());
-                JarInputStream jar = new JarInputStream(is)
-            ) {
-                Manifest m = jar.getManifest();
-                if (m == null) {
-                    throw new IOException("Manifest not present in the first entry of the zip");
-                }
-                return m;
-            }
-        } else {
-        	ZipFile file;
-            Artifact mvnArtifact = (Artifact) artifact;
-            File localFile = new File(localRepo.pathOf(mvnArtifact));
-            if (localFile.exists()) {
-                // avoid going over to the repository if the file is already on
-                // the disk
-                file = new ZipFile(localFile);
-            } else {
-                artifactResolver.resolve(mvnArtifact, remoteRepos, localRepo);
-                file = new ZipFile(mvnArtifact.getFile());
-            }
-            ZipEntry entry = file.getEntry("META-INF/MANIFEST.MF");
-            if (entry == null) {
-                throw new IOException("Manifest not present in the first entry of the zip");
-            }
-            // temporarily replace System.err to hide warnings issued by the Manifest reading process
-            PrintStream original = System.err;
-            try {
-                System.setErr(new PrintStream(new ByteArrayOutputStream()));
-                try (
-                    InputStream is = file.getInputStream(entry)
-                ) {
-                    Manifest manifest = new Manifest(is);
-                    return manifest;
-                }
-            } finally {
-                System.setErr(original);
-            }
-        }
-    }
-
-    /*
-     * Resolve an artifact, downloading it from remote repositories when necessary
-     */
-    private Object resolve(String bundle) throws Exception, ArtifactNotFoundException {
-        if (!isMavenProtocol(bundle)) {
-            return bundle;
-        }
-        Artifact artifact = getArtifact(bundle);
-        if (bundle.indexOf(MVN_REPO_SEPARATOR) >= 0) {
-            if (bundle.startsWith(MVN_URI_PREFIX)) {
-                bundle = bundle.substring(MVN_URI_PREFIX.length());
-            }
-            String repo = bundle.substring(0, bundle.indexOf(MVN_REPO_SEPARATOR));
-            ArtifactRepository repository = new DefaultArtifactRepository(artifact.getArtifactId() + "-repo", repo,
-                    new DefaultRepositoryLayout());
-            List<ArtifactRepository> repos = new LinkedList<ArtifactRepository>();
-            repos.add(repository);
-            artifactResolver.resolve(artifact, repos, localRepo);
-        } else {
-            artifactResolver.resolve(artifact, remoteRepos, localRepo);
-        }
-        if (artifact == null) {
-            throw new Exception("Unable to resolve artifact for uri " + bundle);
-        } else {
-            return artifact;
-        }
-    }
-
-    /*
-     * Create an artifact for a given mvn: uri
-     */
-    private Artifact getArtifact(String uri) {
-        // remove the mvn: prefix when necessary
-        if (uri.startsWith(MVN_URI_PREFIX)) {
-            uri = uri.substring(MVN_URI_PREFIX.length());
-        }
-        // remove the repository url when specified
-        if (uri.contains(MVN_REPO_SEPARATOR)) {
-            uri = uri.split(MVN_REPO_SEPARATOR)[1];
-        }
-        String[] elements = uri.split("/");
-
-        switch (elements.length) {
-            case 5:
-                return factory.createArtifactWithClassifier(elements[0], elements[1], elements[2], elements[3], elements[4]);
-            case 4:
-                return factory.createArtifact(elements[0], elements[1], elements[2], Artifact.SCOPE_PROVIDED, elements[3]);
-            case 3:
-                return factory.createArtifact(elements[0], elements[1], elements[2], Artifact.SCOPE_PROVIDED, "jar");
-            default:
-                return null;
-        }
-
-    }
-
-    /*
-     * Check whether the bundle URL starts with the mvn protocol
-     */
-    private boolean isMavenProtocol(String bundle) {
-        return bundle.startsWith(MVN_URI_PREFIX);
-    }
-
-    /*
-     * Helper method for debug logging
-     */
-    private void debug(String message, Object... parms) {
-        if (getLog().isDebugEnabled()) {
-            getLog().debug(String.format(message, parms));
-        }
-    }
-
-    /*
-     * Helper method for info logging
-     */
-    private void info(String message, Object... parms) {
-        getLog().info(String.format(message, parms));
-    }
-
-    /*
-     * Helper method for warn logging
-     */
-    private void warn(String message, Object... parms) {
-        getLog().warn(String.format(message, parms));
-    }
-
-    /*
-     * Helper method for error logging
-     */
-    private void error(String message, Exception error, Object... parms) {
-        getLog().error(String.format(message, parms), error);
-    }
-
-    /*
-     * Convenience collection for holding features
-     */
-    private class Features {
-
-        private List<Feature> features = new LinkedList<Feature>();
-
-        public void add(Feature feature) {
-            features.add(feature);
-        }
-
-        public Feature get(String name, String version) throws Exception {
-            for (Feature feature : features) {
-                if (name.equals(feature.getName()) && version.equals(feature.getVersion())) {
-                    return feature;
-                }
-            }
-            throw new Exception(String.format("Unable to find definition for feature %s (version %s)",
-                    name, version));
-        }
-
-        public void add(Feature[] array) {
-            for (Feature feature : array) {
-                add(feature);
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/VerifyFeatureResolutionMojo.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/VerifyFeatureResolutionMojo.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/VerifyFeatureResolutionMojo.java
deleted file mode 100644
index 902b87a..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/features/VerifyFeatureResolutionMojo.java
+++ /dev/null
@@ -1,805 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.karaf.tooling.features;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Hashtable;
-import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.jar.Attributes;
-import java.util.jar.Manifest;
-import java.util.regex.Pattern;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import aQute.bnd.osgi.Macro;
-import aQute.bnd.osgi.Processor;
-import org.apache.felix.resolver.Logger;
-import org.apache.felix.resolver.ResolverImpl;
-import org.apache.felix.utils.version.VersionRange;
-import org.apache.felix.utils.version.VersionTable;
-import org.apache.karaf.features.FeatureEvent;
-import org.apache.karaf.features.FeaturesService;
-import org.apache.karaf.features.internal.download.DownloadCallback;
-import org.apache.karaf.features.internal.download.DownloadManager;
-import org.apache.karaf.features.internal.download.Downloader;
-import org.apache.karaf.features.internal.download.StreamProvider;
-import org.apache.karaf.features.internal.model.Conditional;
-import org.apache.karaf.features.internal.model.ConfigFile;
-import org.apache.karaf.features.internal.model.Feature;
-import org.apache.karaf.features.internal.model.Features;
-import org.apache.karaf.features.internal.model.JaxbUtil;
-import org.apache.karaf.features.internal.resolver.ResourceBuilder;
-import org.apache.karaf.features.internal.resolver.ResourceImpl;
-import org.apache.karaf.features.internal.resolver.ResourceUtils;
-import org.apache.karaf.features.internal.service.Deployer;
-import org.apache.karaf.features.internal.service.State;
-import org.apache.karaf.features.internal.util.MapUtils;
-import org.apache.karaf.features.internal.util.MultiException;
-import org.apache.karaf.profile.assembly.CustomDownloadManager;
-import org.apache.karaf.tooling.utils.MojoSupport;
-import org.apache.karaf.util.config.PropertiesLoader;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.plugin.MojoExecutionException;
-import org.apache.maven.plugin.MojoFailureException;
-import org.apache.maven.plugins.annotations.Mojo;
-import org.apache.maven.plugins.annotations.Parameter;
-import org.apache.maven.plugins.annotations.ResolutionScope;
-import org.apache.maven.project.MavenProject;
-import org.ops4j.pax.url.mvn.MavenResolver;
-import org.ops4j.pax.url.mvn.MavenResolvers;
-import org.osgi.framework.Bundle;
-import org.osgi.framework.BundleException;
-import org.osgi.framework.Constants;
-import org.osgi.framework.InvalidSyntaxException;
-import org.osgi.framework.Version;
-import org.osgi.framework.namespace.IdentityNamespace;
-import org.osgi.framework.startlevel.BundleStartLevel;
-import org.osgi.framework.wiring.BundleCapability;
-import org.osgi.framework.wiring.BundleRequirement;
-import org.osgi.framework.wiring.BundleRevision;
-import org.osgi.framework.wiring.BundleWiring;
-import org.osgi.resource.Requirement;
-import org.osgi.resource.Resource;
-import org.osgi.resource.Wire;
-import org.osgi.service.resolver.ResolutionException;
-
-import static java.util.jar.JarFile.MANIFEST_NAME;
-
-@Mojo(name = "verify-features", requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME)
-public class VerifyFeatureResolutionMojo extends MojoSupport {
-
-    @Parameter(property = "descriptors")
-    protected Set<String> descriptors;
-
-    @Parameter(property = "features")
-    protected Set<String> features;
-
-    @Parameter(property = "framework")
-    protected Set<String> framework;
-
-    @Parameter(property = "configuration")
-    protected String configuration;
-
-    @Parameter(property = "distribution", defaultValue = "org.apache.karaf:apache-karaf")
-    protected String distribution;
-
-    @Parameter(property = "javase")
-    protected String javase;
-
-    @Parameter(property = "dist-dir")
-    protected String distDir;
-
-    @Parameter(property = "additional-metadata")
-    protected File additionalMetadata;
-
-    @Parameter(property = "ignore-missing-conditions")
-    protected boolean ignoreMissingConditions;
-
-    @Parameter(property = "fail")
-    protected String fail = "end";
-
-    @Parameter(property = "verify-transitive")
-    protected boolean verifyTransitive = false;
-
-    @Parameter(defaultValue = "${project}", readonly = true)
-    protected MavenProject project;
-
-    protected MavenResolver resolver;
-
-    @Override
-    public void execute() throws MojoExecutionException, MojoFailureException {
-        Hashtable<String, String> config = new Hashtable<>();
-        StringBuilder remote = new StringBuilder();
-        for (Object obj : project.getRemoteProjectRepositories()) {
-            if (remote.length() > 0) {
-                remote.append(",");
-            }
-            remote.append(invoke(obj, "getUrl"));
-            remote.append("@id=").append(invoke(obj, "getId"));
-            if (!((Boolean) invoke(getPolicy(obj, false), "isEnabled"))) {
-                remote.append("@noreleases");
-            }
-            if ((Boolean) invoke(getPolicy(obj, true), "isEnabled")) {
-                remote.append("@snapshots");
-            }
-        }
-        getLog().info("Using repositories: " + remote.toString());
-        config.put("maven.repositories", remote.toString());
-        // TODO: add more configuration bits ?
-        resolver = MavenResolvers.createMavenResolver(config, "maven");
-        doExecute();
-    }
-
-    private Object invoke(Object object, String getter) throws MojoExecutionException {
-        try {
-            return object.getClass().getMethod(getter).invoke(object);
-        } catch (Exception e) {
-            throw new MojoExecutionException("Unable to build remote repository from " + object.toString(), e);
-        }
-    }
-
-    private Object getPolicy(Object object, boolean snapshots) throws MojoExecutionException {
-        return invoke(object, "getPolicy", new Class[] { Boolean.TYPE }, new Object[] { snapshots });
-    }
-
-    private Object invoke(Object object, String getter, Class[] types, Object[] params) throws MojoExecutionException {
-        try {
-            return object.getClass().getMethod(getter, types).invoke(object, params);
-        } catch (Exception e) {
-            throw new MojoExecutionException("Unable to build remote repository from " + object.toString(), e);
-        }
-    }
-
-    protected void doExecute() throws MojoExecutionException, MojoFailureException {
-        System.setProperty("karaf.home", "target/karaf");
-        System.setProperty("karaf.data", "target/karaf/data");
-
-        Hashtable<String, String> properties = new Hashtable<>();
-
-        if (additionalMetadata != null) {
-            try (Reader reader = new FileReader(additionalMetadata)) {
-                Properties metadata = new Properties();
-                metadata.load(reader);
-                for (Enumeration<?> e = metadata.propertyNames(); e.hasMoreElements(); ) {
-                    Object key = e.nextElement();
-                    Object val = metadata.get(key);
-                    properties.put(key.toString(), val.toString());
-                }
-            } catch (IOException e) {
-                throw new MojoExecutionException("Unable to load additional metadata from " + additionalMetadata, e);
-            }
-        }
-
-        // TODO: allow using external configuration ?
-        ScheduledExecutorService executor = Executors.newScheduledThreadPool(8);
-        DownloadManager manager = new CustomDownloadManager(resolver, executor);
-        final Map<String, Features> repositories;
-        Map<String, List<Feature>> allFeatures = new HashMap<>();
-        try {
-            repositories = loadRepositories(manager, descriptors);
-            for (String repoUri : repositories.keySet()) {
-                List<Feature> features = repositories.get(repoUri).getFeature();
-                // Rewrite bundle locations so that configuration file URLs are inlined
-                for (Feature feature : features) {
-                    for (org.apache.karaf.features.internal.model.Bundle bi : feature.getBundle()) {
-                        String loc = bi.getLocation();
-                        String nloc = null;
-                        if (loc.contains("file:")) {
-                            for (ConfigFile cfi : feature.getConfigfile()) {
-                                if (cfi.getFinalname().substring(1)
-                                        .equals(loc.substring(loc.indexOf("file:") + "file:".length()))) {
-                                    nloc = cfi.getLocation();
-                                }
-                            }
-                        }
-                        if (nloc != null) {
-                            Field field = bi.getClass().getDeclaredField("location");
-                            field.setAccessible(true);
-                            field.set(bi, loc.substring(0, loc.indexOf("file:")) + nloc);
-                        }
-                    }
-                }
-                allFeatures.put(repoUri, features);
-            }
-        } catch (Exception e) {
-            throw new MojoExecutionException("Unable to load features descriptors", e);
-        }
-
-        List<Feature> featuresToTest = new ArrayList<>();
-        if (verifyTransitive) {
-            for (List<Feature> features : allFeatures.values()) {
-                featuresToTest.addAll(features);
-            }
-        } else {
-            for (String uri : descriptors) {
-                featuresToTest.addAll(allFeatures.get(uri));
-            }
-        }
-        if (features != null && !features.isEmpty()) {
-            StringBuilder sb = new StringBuilder();
-            for (String feature : features) {
-                if (sb.length() > 0) {
-                    sb.append("|");
-                }
-                String p = feature.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*");
-                sb.append(p);
-                if (!feature.contains("/")) {
-                    sb.append("/.*");
-                }
-            }
-            Pattern pattern = Pattern.compile(sb.toString());
-            for (Iterator<Feature> iterator = featuresToTest.iterator(); iterator.hasNext();) {
-                Feature feature = iterator.next();
-                String id = feature.getName() + "/" + feature.getVersion();
-                if (!pattern.matcher(id).matches()) {
-                    iterator.remove();
-                }
-            }
-        }
-
-        for (String fmk : framework) {
-            properties.put("feature.framework." + fmk, fmk);
-        }
-        List<Exception> failures = new ArrayList<>();
-        for (Feature feature : featuresToTest) {
-            try {
-                String id = feature.getName() + "/" + feature.getVersion();
-                verifyResolution(manager, repositories, Collections.singleton(id), properties);
-                getLog().info("Verification of feature " + id + " succeeded");
-            } catch (Exception e) {
-                if (e.getCause() instanceof ResolutionException) {
-                    getLog().warn(e.getMessage());
-                } else {
-                    getLog().warn(e);
-                }
-                failures.add(e);
-                if ("first".equals(fail)) {
-                    throw e;
-                }
-            }
-            for (Conditional cond : feature.getConditional()) {
-                Set<String> ids = new LinkedHashSet<>();
-                ids.add(feature.getId());
-                ids.addAll(cond.getCondition());
-                try {
-                    verifyResolution(manager, repositories, ids, properties);
-                    getLog().info("Verification of feature " + ids + " succeeded");
-                } catch (Exception e) {
-                    if (ignoreMissingConditions && e.getCause() instanceof ResolutionException) {
-                        boolean ignore = true;
-                        Collection<Requirement> requirements = ((ResolutionException) e.getCause()).getUnresolvedRequirements();
-                        for (Requirement req : requirements) {
-                            ignore &= (IdentityNamespace.IDENTITY_NAMESPACE.equals(req.getNamespace())
-                                    && ResourceUtils.TYPE_FEATURE.equals(req.getAttributes().get("type"))
-                                    && cond.getCondition().contains(req.getAttributes().get(IdentityNamespace.IDENTITY_NAMESPACE).toString()));
-                        }
-                        if (ignore) {
-                            getLog().warn("Feature resolution failed for " + ids
-                                    + "\nMessage: " + e.getCause().getMessage());
-                            continue;
-                        }
-                    }
-                    if (e.getCause() instanceof ResolutionException) {
-                        getLog().warn(e.getMessage());
-                    } else {
-                        getLog().warn(e);
-                    }
-                    failures.add(e);
-                    if ("first".equals(fail)) {
-                        throw e;
-                    }
-                }
-            }
-        }
-        if ("end".equals(fail) && !failures.isEmpty()) {
-            throw new MojoExecutionException("Verification failures", new MultiException("Verification failures", failures));
-        }
-    }
-
-    private void verifyResolution(DownloadManager manager, final Map<String, Features> repositories, Set<String> features, Hashtable<String, String> properties) throws MojoExecutionException {
-        try {
-            Bundle systemBundle = getSystemBundle(getMetadata(properties, "metadata#"));
-            DummyDeployCallback callback = new DummyDeployCallback(systemBundle, repositories.values());
-            Deployer deployer = new Deployer(manager, new ResolverImpl(new MavenResolverLog()), callback);
-
-
-            // Install framework
-            Deployer.DeploymentRequest request = createDeploymentRequest();
-
-            for (String fmwk : framework) {
-                MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, fmwk);
-            }
-            try {
-                deployer.deploy(callback.getDeploymentState(), request);
-            } catch (Exception e) {
-                throw new MojoExecutionException("Unable to resolve framework features", e);
-            }
-
-
-            /*
-            boolean resolveOptionalImports = getResolveOptionalImports(properties);
-
-            DeploymentBuilder builder = new DeploymentBuilder(
-                    manager,
-                    null,
-                    repositories.values(),
-                    -1 // Disable url handlers
-            );
-            Map<String, Resource> downloadedResources = builder.download(
-                    getPrefixedProperties(properties, "feature."),
-                    getPrefixedProperties(properties, "bundle."),
-                    getPrefixedProperties(properties, "fab."),
-                    getPrefixedProperties(properties, "req."),
-                    getPrefixedProperties(properties, "override."),
-                    getPrefixedProperties(properties, "optional."),
-                    getMetadata(properties, "metadata#")
-            );
-
-            for (String uri : getPrefixedProperties(properties, "resources.")) {
-                builder.addResourceRepository(new MetadataRepository(new HttpMetadataProvider(uri)));
-            }
-            */
-
-
-            // Install features
-            for (String feature : features) {
-                MapUtils.addToMapSet(request.requirements, FeaturesService.ROOT_REGION, feature);
-            }
-            try {
-                Set<String> prereqs = new HashSet<>();
-                while (true) {
-                    try {
-                        deployer.deploy(callback.getDeploymentState(), request);
-                        break;
-                    } catch (Deployer.PartialDeploymentException e) {
-                        if (!prereqs.containsAll(e.getMissing())) {
-                            prereqs.addAll(e.getMissing());
-                        } else {
-                            throw new Exception("Deployment aborted due to loop in missing prerequisites: " + e.getMissing());
-                        }
-                    }
-                }
-                // TODO: find unused resources ?
-            } catch (Exception e) {
-                throw new MojoExecutionException("Feature resolution failed for " + features
-                        + "\nMessage: " + e.getMessage()
-                        + "\nRepositories: " + toString(new TreeSet<>(repositories.keySet()))
-                        + "\nResources: " + toString(new TreeSet<>(manager.getProviders().keySet())), e);
-            }
-
-
-        } catch (MojoExecutionException e) {
-            throw e;
-        } catch (Exception e) {
-            throw new MojoExecutionException("Error verifying feature " + features + "\nMessage: " + e.getMessage(), e);
-        }
-    }
-
-    private Deployer.DeploymentRequest createDeploymentRequest() {
-        Deployer.DeploymentRequest request = new Deployer.DeploymentRequest();
-        request.bundleUpdateRange = FeaturesService.DEFAULT_BUNDLE_UPDATE_RANGE;
-        request.featureResolutionRange = FeaturesService.DEFAULT_FEATURE_RESOLUTION_RANGE;
-        request.overrides = new HashSet<>();
-        request.requirements = new HashMap<>();
-        request.stateChanges = new HashMap<>();
-        request.options = EnumSet.noneOf(FeaturesService.Option.class);
-        return request;
-    }
-
-    private String toString(Collection<String> collection) {
-        StringBuilder sb = new StringBuilder();
-        sb.append("{\n");
-        for (String s : collection) {
-            sb.append("\t").append(s).append("\n");
-        }
-        sb.append("}");
-        return sb.toString();
-    }
-
-    private Bundle getSystemBundle(Map<String, Map<VersionRange, Map<String, String>>> metadata) throws Exception {
-        URL configPropURL;
-        if (configuration != null) {
-            configPropURL = new URL(configuration);
-        } else {
-            Artifact karafDistro = project.getArtifactMap().get(distribution);
-            if (karafDistro == null) {
-                throw new MojoFailureException("The karaf distribution " + distribution + " is not a dependency");
-            }
-            if ("kar".equals(karafDistro.getType()) && distDir == null) {
-                distDir = "resources";
-            }
-            String dir = distDir;
-            if (dir == null) {
-                dir = karafDistro.getArtifactId() + "-" + karafDistro.getBaseVersion();
-            }
-            configPropURL = new URL("jar:file:" + karafDistro.getFile() + "!/" + dir + "/etc/config.properties");
-        }
-        org.apache.felix.utils.properties.Properties configProps = PropertiesLoader.loadPropertiesFile(configPropURL, true);
-//        copySystemProperties(configProps);
-        if (javase == null) {
-            configProps.put("java.specification.version", System.getProperty("java.specification.version"));
-        } else {
-            configProps.put("java.specification.version", javase);
-        }
-        configProps.substitute();
-
-        Attributes attributes = new Attributes();
-        attributes.putValue(Constants.BUNDLE_MANIFESTVERSION, "2");
-        attributes.putValue(Constants.BUNDLE_SYMBOLICNAME, "system.bundle");
-        attributes.putValue(Constants.BUNDLE_VERSION, "0.0.0");
-
-        String exportPackages = configProps.getProperty("org.osgi.framework.system.packages");
-        if (configProps.containsKey("org.osgi.framework.system.packages.extra")) {
-            exportPackages += "," + configProps.getProperty("org.osgi.framework.system.packages.extra");
-        }
-        exportPackages = exportPackages.replaceAll(",\\s*,", ",");
-        attributes.putValue(Constants.EXPORT_PACKAGE, exportPackages);
-
-        String systemCaps = configProps.getProperty("org.osgi.framework.system.capabilities");
-        attributes.putValue(Constants.PROVIDE_CAPABILITY, systemCaps);
-
-        // TODO: support metadata overrides on system bundle
-//        attributes = DeploymentBuilder.overrideAttributes(attributes, metadata);
-
-        final Hashtable<String, String> headers = new Hashtable<>();
-        for (Map.Entry attr : attributes.entrySet()) {
-            headers.put(attr.getKey().toString(), attr.getValue().toString());
-        }
-
-        final FakeBundleRevision resource = new FakeBundleRevision(headers, "system-bundle", 0l);
-        return resource.getBundle();
-    }
-
-
-    public static Map<String, Features> loadRepositories(DownloadManager manager, Set<String> uris) throws Exception {
-        final Map<String, Features> loaded = new HashMap<>();
-        final Downloader downloader = manager.createDownloader();
-        for (String repository : uris) {
-            downloader.download(repository, new DownloadCallback() {
-                @Override
-                public void downloaded(final StreamProvider provider) throws Exception {
-                    try (InputStream is = provider.open()) {
-                        Features featuresModel = JaxbUtil.unmarshal(provider.getUrl(), is, false);
-                        synchronized (loaded) {
-                            loaded.put(provider.getUrl(), featuresModel);
-                            for (String innerRepository : featuresModel.getRepository()) {
-                                downloader.download(innerRepository, this);
-                            }
-                        }
-                    }
-                }
-            });
-        }
-        downloader.await();
-        return loaded;
-    }
-
-    public static Set<String> getPrefixedProperties(Map<String, String> properties, String prefix) {
-        Set<String> result = new HashSet<>();
-        for (String key : properties.keySet()) {
-            if (key.startsWith(prefix)) {
-                String url = properties.get(key);
-                if (url == null || url.length() == 0) {
-                    url = key.substring(prefix.length());
-                }
-                if (!url.isEmpty()) {
-                    result.add(url);
-                }
-            }
-        }
-        return result;
-    }
-
-    public static Map<String, Map<VersionRange, Map<String, String>>> getMetadata(Map<String, String> properties, String prefix) {
-        Map<String, Map<VersionRange, Map<String, String>>> result = new HashMap<>();
-        for (String key : properties.keySet()) {
-            if (key.startsWith(prefix)) {
-                String val = properties.get(key);
-                key = key.substring(prefix.length());
-                String[] parts = key.split("#");
-                if (parts.length == 3) {
-                    Map<VersionRange, Map<String, String>> ranges = result.get(parts[0]);
-                    if (ranges == null) {
-                        ranges = new HashMap<>();
-                        result.put(parts[0], ranges);
-                    }
-                    String version = parts[1];
-                    if (!version.startsWith("[") && !version.startsWith("(")) {
-                        Processor processor = new Processor();
-                        processor.setProperty("@", VersionTable.getVersion(version).toString());
-                        Macro macro = new Macro(processor);
-                        version = macro.process("${range;[==,=+)}");
-                    }
-                    VersionRange range = new VersionRange(version);
-                    Map<String, String> hdrs = ranges.get(range);
-                    if (hdrs == null) {
-                        hdrs = new HashMap<>();
-                        ranges.put(range, hdrs);
-                    }
-                    hdrs.put(parts[2], val);
-                }
-            }
-        }
-        return result;
-    }
-
-    public static class FakeBundleRevision extends ResourceImpl implements BundleRevision, BundleStartLevel {
-
-        private final Bundle bundle;
-        private int startLevel;
-
-        public FakeBundleRevision(final Hashtable<String, String> headers, final String location, final long bundleId) throws BundleException {
-            ResourceBuilder.build(this, location, headers);
-            this.bundle = (Bundle) Proxy.newProxyInstance(
-                    getClass().getClassLoader(),
-                    new Class[] { Bundle.class },
-                    new InvocationHandler() {
-                        @Override
-                        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
-                            if (method.getName().equals("hashCode")) {
-                                return FakeBundleRevision.this.hashCode();
-                            } else if (method.getName().equals("equals")) {
-                                return proxy == args[0];
-                            } else if (method.getName().equals("toString")) {
-                                return bundle.getSymbolicName() + "/" + bundle.getVersion();
-                            } else if (method.getName().equals("adapt")) {
-                                if (args.length == 1 && args[0] == BundleRevision.class) {
-                                    return FakeBundleRevision.this;
-                                } else if (args.length == 1 && args[0] == BundleStartLevel.class) {
-                                    return FakeBundleRevision.this;
-                                }
-                            } else if (method.getName().equals("getHeaders")) {
-                                return headers;
-                            } else if (method.getName().equals("getBundleId")) {
-                                return bundleId;
-                            } else if (method.getName().equals("getLocation")) {
-                                return location;
-                            } else if (method.getName().equals("getSymbolicName")) {
-                                String name = headers.get(Constants.BUNDLE_SYMBOLICNAME);
-                                int idx = name.indexOf(';');
-                                if (idx > 0) {
-                                    name = name.substring(0, idx).trim();
-                                }
-                                return name;
-                            } else if (method.getName().equals("getVersion")) {
-                                return new Version(headers.get(Constants.BUNDLE_VERSION));
-                            } else if (method.getName().equals("getState")) {
-                                return Bundle.ACTIVE;
-                            } else if (method.getName().equals("getLastModified")) {
-                                return 0L;
-                            }
-                            return null;
-                        }
-                    });
-        }
-
-        @Override
-        public int getStartLevel() {
-            return startLevel;
-        }
-
-        @Override
-        public void setStartLevel(int startLevel) {
-            this.startLevel = startLevel;
-        }
-
-        @Override
-        public boolean isPersistentlyStarted() {
-            return true;
-        }
-
-        @Override
-        public boolean isActivationPolicyUsed() {
-            return false;
-        }
-
-        @Override
-        public String getSymbolicName() {
-            return bundle.getSymbolicName();
-        }
-
-        @Override
-        public Version getVersion() {
-            return bundle.getVersion();
-        }
-
-        @Override
-        public List<BundleCapability> getDeclaredCapabilities(String namespace) {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public List<BundleRequirement> getDeclaredRequirements(String namespace) {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public int getTypes() {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public BundleWiring getWiring() {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public Bundle getBundle() {
-            return bundle;
-        }
-    }
-
-    public static class DummyDeployCallback implements Deployer.DeployCallback {
-
-        private final Bundle systemBundle;
-        private final Deployer.DeploymentState dstate;
-        private final AtomicLong nextBundleId = new AtomicLong(0);
-
-        public DummyDeployCallback(Bundle sysBundle, Collection<Features> repositories) throws Exception {
-            systemBundle = sysBundle;
-            dstate = new Deployer.DeploymentState();
-            dstate.bundles = new HashMap<>();
-            dstate.features = new HashMap<>();
-            dstate.bundlesPerRegion = new HashMap<>();
-            dstate.filtersPerRegion = new HashMap<>();
-            dstate.state = new State();
-
-            MapUtils.addToMapSet(dstate.bundlesPerRegion, FeaturesService.ROOT_REGION, 0L);
-            dstate.bundles.put(0L, systemBundle);
-            for (Features repo : repositories) {
-                for (Feature f : repo.getFeature()) {
-                    dstate.features.put(f.getId(), f);
-                }
-            }
-        }
-
-        public Deployer.DeploymentState getDeploymentState() {
-            return dstate;
-        }
-
-        @Override
-        public void print(String message, boolean verbose) {
-        }
-
-        @Override
-        public void saveState(State state) {
-            dstate.state.replace(state);
-        }
-
-        @Override
-        public void persistResolveRequest(Deployer.DeploymentRequest request) throws IOException {
-        }
-
-        @Override
-        public void installFeature(org.apache.karaf.features.Feature feature) throws IOException, InvalidSyntaxException {
-        }
-
-        @Override
-        public void callListeners(FeatureEvent featureEvent) {
-        }
-
-        @Override
-        public Bundle installBundle(String region, String uri, InputStream is) throws BundleException {
-            try {
-                Hashtable<String, String> headers = new Hashtable<>();
-                ZipInputStream zis = new ZipInputStream(is);
-                ZipEntry entry;
-                while ((entry = zis.getNextEntry()) != null) {
-                    if (MANIFEST_NAME.equals(entry.getName())) {
-                        Attributes attributes = new Manifest(zis).getMainAttributes();
-                        for (Map.Entry attr : attributes.entrySet()) {
-                            headers.put(attr.getKey().toString(), attr.getValue().toString());
-                        }
-                    }
-                }
-                BundleRevision revision = new FakeBundleRevision(headers, uri, nextBundleId.incrementAndGet());
-                Bundle bundle = revision.getBundle();
-                MapUtils.addToMapSet(dstate.bundlesPerRegion, region, bundle.getBundleId());
-                dstate.bundles.put(bundle.getBundleId(), bundle);
-                return bundle;
-            } catch (IOException e) {
-                throw new BundleException("Unable to install bundle", e);
-            }
-        }
-
-        @Override
-        public void updateBundle(Bundle bundle, String uri, InputStream is) throws BundleException {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public void uninstall(Bundle bundle) throws BundleException {
-            throw new UnsupportedOperationException();
-        }
-
-        @Override
-        public void startBundle(Bundle bundle) throws BundleException {
-        }
-
-        @Override
-        public void stopBundle(Bundle bundle, int options) throws BundleException {
-        }
-
-        @Override
-        public void setBundleStartLevel(Bundle bundle, int startLevel) {
-        }
-
-        @Override
-        public void refreshPackages(Collection<Bundle> bundles) throws InterruptedException {
-        }
-
-        @Override
-        public void resolveBundles(Set<Bundle> bundles, Map<Resource, List<Wire>> wiring, Map<Resource, Bundle> resToBnd) {
-        }
-
-        @Override
-        public void replaceDigraph(Map<String, Map<String, Map<String, Set<String>>>> policies, Map<String, Set<Long>> bundles) throws BundleException, InvalidSyntaxException {
-        }
-    }
-
-    public class MavenResolverLog extends org.apache.felix.resolver.Logger {
-
-        public MavenResolverLog() {
-            super(Logger.LOG_DEBUG);
-        }
-
-        @Override
-        protected void doLog(int level, String msg, Throwable throwable) {
-            switch (level) {
-            case LOG_DEBUG:
-                getLog().debug(msg, throwable);
-                break;
-            case LOG_INFO:
-                getLog().info(msg, throwable);
-                break;
-            case LOG_WARNING:
-                getLog().warn(msg, throwable);
-                break;
-            case LOG_ERROR:
-                getLog().error(msg, throwable);
-                break;
-            }
-        }
-    }
-}
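
For context on the override handling above: when a version in an override clause is not
already an explicit range, the bnd range macro expands it into one. A minimal,
self-contained sketch follows; it assumes the same Processor/Macro and
VersionTable/VersionRange classes used above (the Felix utils variants are assumed here,
and exact packages may vary by version).

    // Assumption: Felix utils VersionTable/VersionRange and bnd Processor/Macro,
    // matching the calls made in the code above.
    import org.apache.felix.utils.version.VersionRange;
    import org.apache.felix.utils.version.VersionTable;

    import aQute.bnd.osgi.Macro;
    import aQute.bnd.osgi.Processor;

    public class RangeMacroSketch {
        public static void main(String[] args) throws Exception {
            // "@" is the implicit argument the range macro operates on.
            Processor processor = new Processor();
            processor.setProperty("@", VersionTable.getVersion("1.2.3").toString());
            Macro macro = new Macro(processor);
            // "==" keeps major.minor for the floor and "=+" bumps the minor for the
            // ceiling, so "1.2.3" expands to a range like "[1.2,1.3)".
            String range = macro.process("${range;[==,=+)}");
            System.out.println(new VersionRange(range));
        }
    }

Versions that already start with "[" or "(" skip the macro and are parsed directly as a
range, matching the startsWith check above.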

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/instances/CreateArchiveMojo.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/instances/CreateArchiveMojo.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/instances/CreateArchiveMojo.java
deleted file mode 100644
index 98d98a1..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/instances/CreateArchiveMojo.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.karaf.tooling.instances;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
-import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.compress.archivers.tar.TarConstants;
-import org.apache.commons.compress.archivers.zip.UnixStat;
-import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
-import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
-import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
-import org.apache.karaf.tooling.utils.MojoSupport;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.plugin.MojoExecutionException;
-import org.apache.maven.plugin.MojoFailureException;
-import org.apache.maven.plugins.annotations.LifecyclePhase;
-import org.apache.maven.plugins.annotations.Mojo;
-import org.apache.maven.plugins.annotations.Parameter;
-import org.apache.maven.plugins.annotations.ResolutionScope;
-
-/**
- * Package a server archive from an assembled server
- */
-@Mojo(name = "instance-create-archive", defaultPhase = LifecyclePhase.PACKAGE, requiresDependencyResolution = ResolutionScope.RUNTIME)
-public class CreateArchiveMojo extends MojoSupport {
-
-    /**
-     * The target directory of the project.
-     */
-    @Parameter(defaultValue="${project.build.directory}")
-    private File destDir;
-
-    /**
-     * The location of the server repository.
-     */
-    @Parameter(defaultValue="${project.build.directory}/assembly")
-    private File targetServerDirectory;
-
-    /**
-     * The target file to set as the project's artifact.
-     */
-    @Parameter(defaultValue="${project.artifactId}-${project.version}")
-    private File targetFile;
-
-    /**
-     * Pack the assembly as a tar.gz archive.
-     */
-    @Parameter
-    private boolean archiveTarGz = true;
-
-    /**
-     * Pack the assembly as a zip archive.
-     */
-    @Parameter
-    private boolean archiveZip = true;
-
-    /**
-     * Use symbolic links in the tar.gz or zip archives.
-     *
-     * Symbolic links are not well supported on the Windows platform
-     * (they do not work on WinXP + NTFS, for example), so they are
-     * excluded by default.
-     */
-    @Parameter
-    private boolean useSymLinks = false;
-
-    public void execute() throws MojoExecutionException, MojoFailureException {
-        getLog().debug("Setting artifact file: " + targetFile);
-        org.apache.maven.artifact.Artifact artifact = project.getArtifact();
-        artifact.setFile(targetFile);
-        try {
-            //now pack up the server.
-            if(archiveTarGz){
-                archive("tar.gz");
-            }
-            if(archiveZip) {
-                archive("zip");
-            }
-        } catch (Exception e) {
-            throw new MojoExecutionException("Could not archive plugin", e);
-        }
-    }
-
-    @SuppressWarnings("deprecation")
-	private void archive(String type) throws IOException {
-        Artifact artifact1 = factory.createArtifactWithClassifier(project.getArtifact().getGroupId(), project.getArtifact().getArtifactId(), project.getArtifact().getVersion(), type, "bin");
-        File target1 = archive(targetServerDirectory, destDir, artifact1);
-        projectHelper.attachArtifact( project, artifact1.getType(), null, target1 );
-    }
-
-    public File archive(File source, File dest, Artifact artifact)
-            throws IOException {
-        String serverName = null;
-        if (targetFile != null && project.getPackaging().equals("karaf-assembly")) {
-            serverName = targetFile.getName();
-        } else {
-           serverName = artifact.getArtifactId() + "-" + artifact.getVersion();
-        }
-        dest = new File(dest, serverName + "." + artifact.getType());
-
-        if ("tar.gz".equals(artifact.getType())) {
-            try (
-                    OutputStream fOut = Files.newOutputStream(dest.toPath());
-                    OutputStream bOut = new BufferedOutputStream(fOut);
-                    OutputStream gzOut = new GzipCompressorOutputStream(bOut);
-                    TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);
-                    DirectoryStream<Path> children = Files.newDirectoryStream(source.toPath())
-
-            ) {
-                tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
-                tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
-                for (Path child : children) {
-                    addFileToTarGz(tOut, child, serverName + "/");
-                }
-            }
-        } else if ("zip".equals(artifact.getType())) {
-            try (
-                    OutputStream fOut = Files.newOutputStream(dest.toPath());
-                    OutputStream bOut = new BufferedOutputStream(fOut);
-                    ZipArchiveOutputStream tOut = new ZipArchiveOutputStream(bOut);
-                    DirectoryStream<Path> children = Files.newDirectoryStream(source.toPath())
-
-            ) {
-                for (Path child : children) {
-                    addFileToZip(tOut, child, serverName + "/");
-                }
-            }
-        } else {
-            throw new IllegalArgumentException("Unknown target type: " + artifact.getType());
-        }
-
-        return dest;
-    }
-
-    private void addFileToTarGz(TarArchiveOutputStream tOut, Path f, String base) throws IOException {
-        if (Files.isDirectory(f)) {
-            String entryName = base + f.getFileName().toString() + "/";
-            TarArchiveEntry tarEntry = new TarArchiveEntry(entryName);
-            tOut.putArchiveEntry(tarEntry);
-            tOut.closeArchiveEntry();
-            try (DirectoryStream<Path> children = Files.newDirectoryStream(f)) {
-                for (Path child : children) {
-                    addFileToTarGz(tOut, child, entryName);
-                }
-            }
-        } else if (useSymLinks && Files.isSymbolicLink(f)) {
-            String entryName = base + f.getFileName().toString();
-            TarArchiveEntry tarEntry = new TarArchiveEntry(entryName, TarConstants.LF_SYMLINK);
-            tarEntry.setLinkName(Files.readSymbolicLink(f).toString());
-            tOut.putArchiveEntry(tarEntry);
-            tOut.closeArchiveEntry();
-        }  else {
-            String entryName = base + f.getFileName().toString();
-            TarArchiveEntry tarEntry = new TarArchiveEntry(entryName);
-            tarEntry.setSize(Files.size(f));
-            if (entryName.contains("/bin/")) {
-                tarEntry.setMode(0755);
-                if (entryName.endsWith(".bat")) {
-                    return;
-                }
-            }
-            tOut.putArchiveEntry(tarEntry);
-            Files.copy(f, tOut);
-            tOut.closeArchiveEntry();
-        }
-    }
-
-    private void addFileToZip(ZipArchiveOutputStream tOut, Path f, String base) throws IOException {
-        if (Files.isDirectory(f)) {
-            String entryName = base + f.getFileName().toString() + "/";
-            ZipArchiveEntry zipEntry = new ZipArchiveEntry(entryName);
-            tOut.putArchiveEntry(zipEntry);
-            tOut.closeArchiveEntry();
-            try (DirectoryStream<Path> children = Files.newDirectoryStream(f)) {
-                for (Path child : children) {
-                    addFileToZip(tOut, child, entryName);
-                }
-            }
-        } else if (useSymLinks && Files.isSymbolicLink(f)) {
-            String entryName = base + f.getFileName().toString();
-            ZipArchiveEntry zipEntry = new ZipArchiveEntry(entryName);
-            zipEntry.setUnixMode(UnixStat.LINK_FLAG | UnixStat.DEFAULT_FILE_PERM);
-            tOut.putArchiveEntry(zipEntry);
-            tOut.write(Files.readSymbolicLink(f).toString().getBytes());
-            tOut.closeArchiveEntry();
-        }  else {
-            String entryName = base + f.getFileName().toString();
-            ZipArchiveEntry zipEntry = new ZipArchiveEntry(entryName);
-            zipEntry.setSize(Files.size(f));
-            if (entryName.contains("/bin/")) {
-                if (!entryName.endsWith(".bat")) {
-                    return;
-                }
-                zipEntry.setUnixMode(0755);
-            }
-            tOut.putArchiveEntry(zipEntry);
-            Files.copy(f, tOut);
-            tOut.closeArchiveEntry();
-        }
-    }
-
-}
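
A minimal sketch of the commons-compress tar.gz path used by addFileToTarGz above,
trimmed to a single file (the file names are hypothetical; the API calls are the same
ones used in the mojo):

    import java.io.BufferedOutputStream;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
    import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
    import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;

    public class TarGzSketch {
        public static void main(String[] args) throws Exception {
            Path source = Paths.get("assembly/bin/karaf");   // hypothetical input file
            Path target = Paths.get("demo.tar.gz");          // hypothetical output archive
            try (OutputStream fOut = Files.newOutputStream(target);
                 OutputStream gzOut = new GzipCompressorOutputStream(new BufferedOutputStream(fOut));
                 TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut)) {
                tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
                TarArchiveEntry entry = new TarArchiveEntry("bin/karaf");
                entry.setSize(Files.size(source)); // tar needs the size before the data is streamed
                entry.setMode(0755);               // executable bit, as done for bin/ entries above
                tOut.putArchiveEntry(entry);
                Files.copy(source, tOut);
                tOut.closeArchiveEntry();
            }
        }
    }

The zip branch is analogous, with permissions recorded through ZipArchiveEntry.setUnixMode
instead of setMode.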

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/BlueprintURLHandler.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/BlueprintURLHandler.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/BlueprintURLHandler.java
deleted file mode 100644
index f123e5c..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/BlueprintURLHandler.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.karaf.tooling.url;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLStreamHandler;
-
-import org.apache.karaf.deployer.blueprint.BlueprintTransformer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * As org.apache.karaf.deployer.blueprint.BlueprintURLHandler needs to be run with
- * the OSGi container, this class was created for use by the features-maven-plugin.
- */
-public class BlueprintURLHandler extends URLStreamHandler {
-
-	private final Logger logger = LoggerFactory.getLogger(BlueprintURLHandler.class);
-
-	private static String SYNTAX = "blueprint: bp-xml-uri";
-
-	private URL blueprintXmlURL;
-
-    /**
-     * Open the connection for the given URL.
-     *
-     * @param url the url from which to open a connection.
-     * @return a connection on the specified URL.
-     * @throws IOException if an error occurs or if the URL is malformed.
-     */
-    @Override
-	public URLConnection openConnection(URL url) throws IOException {
-		if (url.getPath() == null || url.getPath().trim().length() == 0) {
-			throw new MalformedURLException ("Path cannot be null or empty. Syntax: " + SYNTAX );
-		}
-		blueprintXmlURL = new URL(url.getPath());
-
-		logger.debug("Blueprint xml URL is: [" + blueprintXmlURL + "]");
-		return new Connection(url);
-	}
-	
-	public URL getBlueprintXmlURL() {
-		return blueprintXmlURL;
-	}
-
-    public class Connection extends URLConnection {
-
-        public Connection(URL url) {
-            super(url);
-        }
-
-        @Override
-        public void connect() throws IOException {
-        }
-
-        @Override
-        public InputStream getInputStream() throws IOException {
-            try {
-                ByteArrayOutputStream os = new ByteArrayOutputStream();
-                BlueprintTransformer.transform(blueprintXmlURL, os);
-                os.close();
-                return new ByteArrayInputStream(os.toByteArray());
-            } catch (Exception e) {
-                logger.error("Error opening blueprint xml url", e);
-                throw (IOException) new IOException("Error opening blueprint xml url").initCause(e);
-            }
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/CustomBundleURLStreamHandlerFactory.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/CustomBundleURLStreamHandlerFactory.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/CustomBundleURLStreamHandlerFactory.java
deleted file mode 100644
index d6fc156..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/CustomBundleURLStreamHandlerFactory.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.karaf.tooling.url;
-
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLStreamHandler;
-import java.net.URLStreamHandlerFactory;
-import java.util.Hashtable;
-
-import org.ops4j.pax.url.mvn.MavenResolver;
-import org.ops4j.pax.url.mvn.MavenResolvers;
-import org.ops4j.pax.url.mvn.ServiceConstants;
-import org.ops4j.pax.url.mvn.internal.AetherBasedResolver;
-import org.ops4j.pax.url.mvn.internal.Connection;
-import org.ops4j.pax.url.mvn.internal.config.MavenConfigurationImpl;
-import shaded.org.ops4j.util.property.PropertiesPropertyResolver;
-
-public class CustomBundleURLStreamHandlerFactory implements URLStreamHandlerFactory {
-
-	private static final String MVN_URI_PREFIX = "mvn";
-	private static final String WRAP_URI_PREFIX = "wrap";
-    private static final String FEATURE_URI_PREFIX = "feature";
-    private static final String SPRING_URI_PREFIX = "spring";
-    private static final String BLUEPRINT_URI_PREFIX = "blueprint";
-    private static final String WAR_URI_PREFIX = "war";
-
-	private final MavenResolver mavenResolver;
-
-	public CustomBundleURLStreamHandlerFactory() {
-		this(null);
-	}
-
-	public CustomBundleURLStreamHandlerFactory(MavenResolver mavenResolver) {
-		this.mavenResolver = mavenResolver;
-	}
-
-	public URLStreamHandler createURLStreamHandler(String protocol) {
-		if (protocol.equals(MVN_URI_PREFIX)) {
-			return new URLStreamHandler() {
-				@Override
-				protected URLConnection openConnection(URL u) throws IOException {
-					MavenResolver resolver = mavenResolver;
-					if (resolver == null) {
-						PropertiesPropertyResolver propertyResolver = new PropertiesPropertyResolver(System.getProperties());
-						final MavenConfigurationImpl config = new MavenConfigurationImpl(propertyResolver, ServiceConstants.PID);
-						resolver = new AetherBasedResolver(config);
-					}
-					return new Connection(u, resolver);
-				}
-			};
-		} else if (protocol.equals(WRAP_URI_PREFIX)){
-			return new org.ops4j.pax.url.wrap.Handler();
-		} else if (protocol.equals(FEATURE_URI_PREFIX)){
-			return new FeatureURLHandler();
-		} else if (protocol.equals(SPRING_URI_PREFIX)){
-			return new SpringURLHandler();
-		} else if (protocol.equals(BLUEPRINT_URI_PREFIX)){
-			return new BlueprintURLHandler();
-        } else if (protocol.equals(WAR_URI_PREFIX)) {
-            return new WarURLHandler();
-		} else {
-			return null;
-		}
-	}
-
-	public static void install() {
-		install(null);
-	}
-
-	public static void install(MavenResolver mavenResolver) {
-		uninstall();
-		URL.setURLStreamHandlerFactory(new CustomBundleURLStreamHandlerFactory(mavenResolver));
-	}
-
-	public static void uninstall() {
-		try {
-			Field handlersField = URL.class.getDeclaredField("handlers");
-			Field factoryField = URL.class.getDeclaredField("factory");
-			factoryField.setAccessible(true);
-			factoryField.set(null, null);
-			handlersField.setAccessible(true);
-			handlersField.set(null, new Hashtable());
-		} catch (Throwable t) {
-			t.printStackTrace();
-		}
-	}
-
-}
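
A minimal sketch of how this factory is typically used from plugin-side code (the Maven
coordinates below are placeholders; when no resolver is supplied, the mvn handler above
falls back to Maven configuration read from system properties):

    import java.io.InputStream;
    import java.net.URL;

    import org.apache.karaf.tooling.url.CustomBundleURLStreamHandlerFactory;

    public class UrlHandlerSketch {
        public static void main(String[] args) throws Exception {
            CustomBundleURLStreamHandlerFactory.install();   // registers the JVM-wide factory
            try (InputStream in = new URL("mvn:org.example/some-artifact/1.0.0/xml/features").openStream()) {
                int count = 0;
                while (in.read() != -1) {
                    count++;   // drain the stream just to show the URL resolves
                }
                System.out.println("read " + count + " bytes");
            } finally {
                CustomBundleURLStreamHandlerFactory.uninstall();
            }
        }
    }

uninstall() resets URL's cached factory and handler table via reflection because the JDK
only allows setURLStreamHandlerFactory to be called once per JVM, so a second install()
would otherwise fail.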

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/FeatureURLHandler.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/FeatureURLHandler.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/FeatureURLHandler.java
deleted file mode 100644
index a995f60..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/FeatureURLHandler.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.karaf.tooling.url;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLStreamHandler;
-
-import org.apache.karaf.deployer.features.FeatureTransformer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * As org.apache.karaf.deployer.features.FeatureURLHandler needs to be run with
- * the OSGi container, this class was created for use by the karaf-maven-plugin.
- */
-public class FeatureURLHandler extends URLStreamHandler {
-
-    private final Logger logger = LoggerFactory.getLogger(FeatureURLHandler.class);
-
-    private static String SYNTAX = "feature: xml-uri";
-
-    private URL featureXmlURL;
-
-    /**
-     * Open the connection for the given URL.
-     *
-     * @param url the url from which to open a connection.
-     * @return a connection on the specified URL.
-     * @throws java.io.IOException if an error occurs or if the URL is malformed.
-     */
-    @Override
-    public URLConnection openConnection(URL url) throws IOException {
-        if (url.getPath() == null || url.getPath().trim().length() == 0) {
-            throw new MalformedURLException("Path cannot be null or empty. Syntax: " + SYNTAX);
-        }
-        featureXmlURL = new URL(url.getPath());
-
-        logger.debug("Features xml URL is: [" + featureXmlURL + "]");
-        return new Connection(url);
-    }
-
-    public URL getFeatureXmlURL() {
-        return featureXmlURL;
-    }
-
-    public class Connection extends URLConnection {
-
-        public Connection(URL url) {
-            super(url);
-        }
-
-        @Override
-        public void connect() throws IOException {
-        }
-
-        @Override
-        public InputStream getInputStream() throws IOException {
-            try {
-                ByteArrayOutputStream os = new ByteArrayOutputStream();
-                FeatureTransformer.transform(featureXmlURL, os);
-                os.close();
-                return new ByteArrayInputStream(os.toByteArray());
-            } catch (Exception e) {
-                logger.error("Error opening features xml url", e);
-                throw (IOException) new IOException("Error opening features xml url").initCause(e);
-            }
-        }
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/karaf/blob/056239dc/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/SpringURLHandler.java
----------------------------------------------------------------------
diff --git a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/SpringURLHandler.java b/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/SpringURLHandler.java
deleted file mode 100644
index 603aee6..0000000
--- a/tooling/karaf-maven-plugin/src/main/java/org/apache/karaf/tooling/url/SpringURLHandler.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.karaf.tooling.url;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLConnection;
-import java.net.URLStreamHandler;
-
-import org.apache.karaf.deployer.spring.SpringTransformer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * As org.apache.karaf.deployer.spring.SpringURLHandler needs to be run with
- * the OSGi container, this class was created for use by the karaf-maven-plugin.
- */
-public class SpringURLHandler extends URLStreamHandler {
-
-	private final Logger logger = LoggerFactory.getLogger(SpringURLHandler.class);
-
-	private static String SYNTAX = "spring: spring-xml-uri";
-
-	private URL springXmlURL;
-
-    /**
-     * Open the connection for the given URL.
-     *
-     * @param url the url from which to open a connection.
-     * @return a connection on the specified URL.
-     * @throws IOException if an error occurs or if the URL is malformed.
-     */
-    @Override
-	public URLConnection openConnection(URL url) throws IOException {
-		if (url.getPath() == null || url.getPath().trim().length() == 0) {
-			throw new MalformedURLException ("Path cannot be null or empty. Syntax: " + SYNTAX );
-		}
-		springXmlURL = new URL(url.getPath());
-
-		logger.debug("Spring xml URL is: [" + springXmlURL + "]");
-		return new Connection(url);
-	}
-	
-	public URL getSpringXmlURL() {
-		return springXmlURL;
-	}
-
-    public class Connection extends URLConnection {
-
-        public Connection(URL url) {
-            super(url);
-        }
-
-        @Override
-        public void connect() throws IOException {
-        }
-
-        @Override
-        public InputStream getInputStream() throws IOException {
-            try {
-                ByteArrayOutputStream os = new ByteArrayOutputStream();
-                SpringTransformer.transform(springXmlURL, os);
-                os.close();
-                return new ByteArrayInputStream(os.toByteArray());
-            } catch (Exception e) {
-                logger.error("Error opening Spring xml url", e);
-                throw (IOException) new IOException("Error opening Spring xml url").initCause(e);
-            }
-        }
-    }
-
-}