Posted to commits@knox.apache.org by lm...@apache.org on 2017/09/23 15:04:53 UTC

[1/4] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Repository: knox
Updated Branches:
  refs/heads/master 3a0119b21 -> c2ca44326


http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
new file mode 100644
index 0000000..fb563fa
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.Service;
+import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
+
+import java.io.*;
+import java.util.*;
+
+
+/**
+ * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
+ * gateway.
+ */
+public class SimpleDescriptorHandler {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+    private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
+
+    public static Map<String, File> handle(File desc) throws IOException {
+        return handle(desc, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException {
+        return handle(desc, desc.getParentFile(), gatewayServices);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
+        return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException {
+        return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
+        return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
+        Map<String, File> result = new HashMap<>();
+
+        File topologyDescriptor;
+
+        DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
+        sdc.setUser(desc.getDiscoveryUser());
+        sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices);
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
+
+        Map<String, List<String>> serviceURLs = new HashMap<>();
+
+        if (cluster != null) {
+            for (SimpleDescriptor.Service descService : desc.getServices()) {
+                String serviceName = descService.getName();
+
+                List<String> descServiceURLs = descService.getURLs();
+                if (descServiceURLs == null || descServiceURLs.isEmpty()) {
+                    descServiceURLs = cluster.getServiceURLs(serviceName);
+                }
+
+                // If there is at least one URL associated with the service, then add it to the map
+                if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
+                    serviceURLs.put(serviceName, descServiceURLs);
+                } else {
+                    log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
+                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
+                                                    ". Topology update aborted!");
+                }
+            }
+        } else {
+            log.failedToDiscoverClusterServices(desc.getClusterName());
+        }
+
+        topologyDescriptor = null;
+        File providerConfig = null;
+        try {
+            // Verify that the referenced provider configuration exists before attempting to read it
+            providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
+            if (providerConfig == null) {
+                log.failedToResolveProviderConfigRef(desc.getProviderConfig());
+                throw new IllegalArgumentException("Unresolved provider configuration reference: " +
+                                                   desc.getProviderConfig() + " ; Topology update aborted!");
+            }
+            result.put("reference", providerConfig);
+
+            // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
+
+            String topologyFilename = desc.getName();
+            if (topologyFilename == null) {
+                topologyFilename = desc.getClusterName();
+            }
+            topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
+            FileWriter fw = new FileWriter(topologyDescriptor);
+
+            fw.write("<topology>\n");
+
+            // Copy the externalized provider configuration content into the topology descriptor in-line
+            InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig));
+            char[] buffer = new char[1024];
+            int count;
+            while ((count = policyReader.read(buffer)) > 0) {
+                fw.write(buffer, 0, count);
+            }
+            policyReader.close();
+
+            // Write the service declarations
+            for (String serviceName : serviceURLs.keySet()) {
+                fw.write("    <service>\n");
+                fw.write("        <role>" + serviceName + "</role>\n");
+                for (String url : serviceURLs.get(serviceName)) {
+                    fw.write("        <url>" + url + "</url>\n");
+                }
+                fw.write("    </service>\n");
+            }
+
+            fw.write("</topology>\n");
+
+            fw.flush();
+            fw.close();
+        } catch (IOException e) {
+            log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
+            topologyDescriptor.delete();
+        }
+
+        result.put("topology", topologyDescriptor);
+        return result;
+    }
+
+
+    private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
+        File providerConfig;
+
+        // If the reference includes a path
+        if (reference.contains(File.separator)) {
+            // Check if it's an absolute path
+            providerConfig = new File(reference);
+            if (!providerConfig.exists()) {
+                // If it's not an absolute path, try treating it as a relative path
+                providerConfig = new File(srcDirectory, reference);
+                if (!providerConfig.exists()) {
+                    providerConfig = null;
+                }
+            }
+        } else { // No file path, just a name
+            // Check if it's co-located with the referencing descriptor
+            providerConfig = new File(srcDirectory, reference);
+            if (!providerConfig.exists()) {
+                // Check the shared-providers config location
+                File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
+                if (sharedProvidersDir.exists()) {
+                    providerConfig = new File(sharedProvidersDir, reference);
+                    if (!providerConfig.exists()) {
+                        // Check if it's a valid name without the extension
+                        providerConfig = new File(sharedProvidersDir, reference + ".xml");
+                        if (!providerConfig.exists()) {
+                            providerConfig = null;
+                        }
+                    }
+                }
+            }
+        }
+
+        return providerConfig;
+    }
+
+}
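
Note: a minimal usage sketch for the handler above — the descriptor path is hypothetical, but the single-argument handle() overload and the "reference"/"topology" map keys come from the code in this commit:

    // Sketch only: assumes a simple descriptor whose provider-config-ref can be resolved.
    File descriptor = new File("/etc/knox/conf/descriptors/mycluster.json"); // hypothetical path
    Map<String, File> generated = SimpleDescriptorHandler.handle(descriptor);
    File providerConfig = generated.get("reference"); // the resolved provider configuration file
    File topology = generated.get("topology");        // the generated topology XML, written next to the descriptor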

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
new file mode 100644
index 0000000..32ceba9
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+class SimpleDescriptorImpl implements SimpleDescriptor {
+
+    @JsonProperty("discovery-type")
+    private String discoveryType;
+
+    @JsonProperty("discovery-address")
+    private String discoveryAddress;
+
+    @JsonProperty("discovery-user")
+    private String discoveryUser;
+
+    @JsonProperty("discovery-pwd-alias")
+    private String discoveryPasswordAlias;
+
+    @JsonProperty("provider-config-ref")
+    private String providerConfig;
+
+    @JsonProperty("cluster")
+    private String cluster;
+
+    @JsonProperty("services")
+    private List<ServiceImpl> services;
+
+    private String name = null;
+
+    void setName(String name) {
+        this.name = name;
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    @Override
+    public String getDiscoveryType() {
+        return discoveryType;
+    }
+
+    @Override
+    public String getDiscoveryAddress() {
+        return discoveryAddress;
+    }
+
+    @Override
+    public String getDiscoveryUser() {
+        return discoveryUser;
+    }
+
+    @Override
+    public String getDiscoveryPasswordAlias() {
+        return discoveryPasswordAlias;
+    }
+
+    @Override
+    public String getClusterName() {
+        return cluster;
+    }
+
+    @Override
+    public String getProviderConfig() {
+        return providerConfig;
+    }
+
+    @Override
+    public List<Service> getServices() {
+        List<Service> result = new ArrayList<>();
+        result.addAll(services);
+        return result;
+    }
+
+    public static class ServiceImpl implements Service {
+        private String name;
+        private List<String> urls;
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getURLs() {
+            return urls;
+        }
+    }
+
+}
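
Note: the @JsonProperty bindings above imply a simple descriptor of roughly this shape (values are illustrative and the discovery-pwd-alias value is an assumption; the field names match the annotations and the JSON written by SimpleDescriptorFactoryTest later in this commit):

    {
      "discovery-type": "AMBARI",
      "discovery-address": "http://c6401.ambari.apache.org:8080",
      "discovery-user": "admin",
      "discovery-pwd-alias": "ambari.discovery.password",
      "provider-config-ref": "ambari-cluster-policy.xml",
      "cluster": "myCluster",
      "services": [
        { "name": "NAMENODE" },
        { "name": "AMBARIUI", "urls": [ "http://c6401.ambari.apache.org:8080" ] }
      ]
    }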

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
new file mode 100644
index 0000000..cf9aa28
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.simple")
+public interface SimpleDescriptorMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Service discovery for cluster {0} failed.")
+    void failedToDiscoverClusterServices(final String cluster);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No URLs were discovered for {0} in the {1} cluster.")
+    void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Failed to resolve the referenced provider configuration {0}.")
+    void failedToResolveProviderConfigRef(final String providerConfigRef);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Error generating topology {0} from simple descriptor: {1}")
+    void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
+                                                      @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
index 55cd5cc..498d750 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -22,8 +22,12 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.monitor.FileAlterationMonitor;
 import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
 import org.apache.hadoop.gateway.topology.*;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
 import org.apache.hadoop.test.TestUtils;
 import org.easymock.EasyMock;
 import org.junit.After;
@@ -36,6 +40,8 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.*;
 
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.isA;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.core.IsNull.notNullValue;
@@ -78,9 +84,17 @@ public class DefaultTopologyServiceTest {
   public void testGetTopologies() throws Exception {
 
     File dir = createDir();
-    long time = dir.lastModified();
+    File topologyDir = new File(dir, "topologies");
+
+    File descriptorsDir = new File(dir, "descriptors");
+    descriptorsDir.mkdirs();
+
+    File sharedProvidersDir = new File(dir, "shared-providers");
+    sharedProvidersDir.mkdirs();
+
+    long time = topologyDir.lastModified();
     try {
-      createFile(dir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
+      createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
 
       TestTopologyListener topoListener = new TestTopologyListener();
       FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
@@ -89,17 +103,16 @@ public class DefaultTopologyServiceTest {
       Map<String, String> c = new HashMap<>();
 
       GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(dir.toString()).anyTimes();
+      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
       EasyMock.replay(config);
 
       provider.init(config, c);
 
-
       provider.addTopologyChangeListener(topoListener);
 
       provider.reloadTopologies();
 
-
       Collection<Topology> topologies = provider.getTopologies();
       assertThat(topologies, notNullValue());
       assertThat(topologies.size(), is(1));
@@ -110,7 +123,7 @@ public class DefaultTopologyServiceTest {
       topoListener.events.clear();
 
       // Add a file to the directory.
-      File two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L);
+      File two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
       assertThat(topologies.size(), is(2));
@@ -131,7 +144,7 @@ public class DefaultTopologyServiceTest {
       assertThat(event.getTopology(), notNullValue());
 
       // Update a file in the directory.
-      two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L);
+      two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
       assertThat(topologies.size(), is(2));
@@ -153,6 +166,49 @@ public class DefaultTopologyServiceTest {
       topology = topologies.iterator().next();
       assertThat(topology.getName(), is("one"));
       assertThat(topology.getTimestamp(), is(time));
+
+      // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
+      // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
+      //         org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+      AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+      EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
+      EasyMock.replay(aliasService);
+      DefaultTopologyService.DescriptorsMonitor dm =
+                                          new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
+
+      // Write out the referenced provider config first
+      File provCfgFile = createFile(sharedProvidersDir,
+                                    "ambari-cluster-policy.xml",
+                                    "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
+                                    1L);
+      try {
+        // Create the simple descriptor in the descriptors dir
+        File simpleDesc =
+                createFile(descriptorsDir,
+                           "four.json",
+                           "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
+                           1L);
+
+        // Trigger the topology generation by noticing the simple descriptor
+        dm.onFileChange(simpleDesc);
+
+        // Load the generated topology
+        provider.reloadTopologies();
+        topologies = provider.getTopologies();
+        assertThat(topologies.size(), is(2));
+        names = new HashSet<>(Arrays.asList("one", "four"));
+        iterator = topologies.iterator();
+        topology = iterator.next();
+        assertThat(names, hasItem(topology.getName()));
+        names.remove(topology.getName());
+        topology = iterator.next();
+        assertThat(names, hasItem(topology.getName()));
+        names.remove(topology.getName());
+        assertThat(names.size(), is(0));
+      } finally {
+        provCfgFile.delete();
+
+      }
     } finally {
       FileUtils.deleteQuietly(dir);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
new file mode 100644
index 0000000..269bed2
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
+import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.junit.Assert.*;
+
+
+public class PropertiesFileServiceDiscoveryTest {
+
+    private static final Map<String, String> clusterProperties = new HashMap<>();
+    static {
+        clusterProperties.put("mycluster.name", "mycluster");
+        clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020");
+        clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050");
+        clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton");
+        clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie");
+        clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath");
+        clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws");
+    }
+
+    private static final Properties config = new Properties();
+    static {
+        for (String name : clusterProperties.keySet()) {
+            config.setProperty(name, clusterProperties.get(name));
+        }
+    }
+
+
+    @Test
+    public void testPropertiesFileServiceDiscovery() throws Exception {
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE");
+        assertNotNull(sd);
+
+        String discoveryAddress = this.getClass().getName() + "__test-discovery-source.properties";
+        File discoverySource = new File(discoveryAddress);
+        try {
+            config.store(new FileOutputStream(discoverySource), "Test discovery source for PropertiesFileServiceDiscovery");
+
+            ServiceDiscovery.Cluster c =
+                        sd.discover(new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster");
+            assertNotNull(c);
+            for (String name : clusterProperties.keySet()) {
+                assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0));
+            }
+        } finally {
+            discoverySource.delete();
+        }
+    }
+
+
+    private void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            String value = "";
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    value += url + " ";
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, value));
+        }
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
new file mode 100644
index 0000000..d592ede
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
+import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+
+import static org.junit.Assert.*;
+
+
+public class ServiceDiscoveryFactoryTest {
+
+    @Test
+    public void testGetDummyImpl() throws Exception {
+        String TYPE = "DUMMY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+    }
+
+
+    @Test
+    public void testGetDummyImplWithMismatchedCase() throws Exception {
+        String TYPE = "dUmmY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE.toUpperCase(), sd.getType());
+    }
+
+
+    @Test
+    public void testGetInvalidImpl() throws Exception {
+        String TYPE = "InValID";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetImplWithMismatchedType() throws Exception {
+        String TYPE = "DeclaredType";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetPropertiesFileImplWithAliasServiceInjection() throws Exception {
+        String TYPE = "PROPERTIES_FILE";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE, new DefaultAliasService());
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+
+        // Verify that the AliasService was injected as expected
+        Field aliasServiceField = sd.getClass().getDeclaredField("aliasService");
+        aliasServiceField.setAccessible(true);
+        Object fieldValue = aliasServiceField.get(sd);
+        assertNotNull(fieldValue);
+        assertTrue(AliasService.class.isAssignableFrom(fieldValue.getClass()));
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
new file mode 100644
index 0000000..4a5323e
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This implementation is intended to be used by tests for which the actual service URLs are of no importance, such that
+ * tests can be written without having a valid service registry (e.g., Ambari) available.
+ */
+public class DummyServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "DUMMY";
+
+    private static final Cluster DUMMY = new Cluster() {
+        @Override
+        public String getName() {
+            return "dummy";
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return Collections.singletonList("http://servicehost:9999/dummy");
+        }
+    };
+
+    private static final Map<String, Cluster> CLUSTERS = new HashMap<>();
+    static {
+        CLUSTERS.put(DUMMY.getName(), DUMMY);
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return CLUSTERS;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return DUMMY;
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
new file mode 100644
index 0000000..d47c38d
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class DummyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return DummyServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new DummyServiceDiscovery();
+    }
+}
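
Note: ServiceDiscoveryFactory resolves implementations by the type name these *ServiceDiscoveryType classes declare. The registration mechanism itself is not shown in this commit; if it follows the standard Java service-provider pattern, the test extensions would be listed in a file along these lines (file name and contents are an assumption):

    # META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType (hypothetical)
    org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
    org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType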

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
new file mode 100644
index 0000000..a7fc34a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.*;
+
+class PropertiesFileServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "PROPERTIES_FILE";
+
+    @GatewayService
+    AliasService aliasService;
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, ServiceDiscovery.Cluster> discover(ServiceDiscoveryConfig config) {
+
+        Map<String, ServiceDiscovery.Cluster> result = new HashMap<>();
+
+        Properties p = new Properties();
+        try {
+            p.load(new FileInputStream(config.getAddress()));
+
+            Map<String, Map<String, List<String>>> clusters = new HashMap<>();
+            for (Object key : p.keySet()) {
+                String propertyKey = (String)key;
+                String[] parts = propertyKey.split("\\.");
+                if (parts.length == 2) {
+                    String clusterName = parts[0];
+                    String serviceName = parts[1];
+                    String serviceURL  = p.getProperty(propertyKey);
+                    if (!clusters.containsKey(clusterName)) {
+                        clusters.put(clusterName, new HashMap<String, List<String>>());
+                    }
+                    Map<String, List<String>> serviceURLs = clusters.get(clusterName);
+                    if (!serviceURLs.containsKey(serviceName)) {
+                        serviceURLs.put(serviceName, new ArrayList<String>());
+                    }
+                    serviceURLs.get(serviceName).add(serviceURL);
+                }
+            }
+
+            for (String clusterName : clusters.keySet()) {
+                result.put(clusterName,
+                        new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName)));
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        return result;
+    }
+
+
+    @Override
+    public ServiceDiscovery.Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        Map<String, ServiceDiscovery.Cluster> clusters = discover(config);
+        return clusters.get(clusterName);
+    }
+
+
+    static class Cluster implements ServiceDiscovery.Cluster {
+        private String name;
+        private Map<String, List<String>> serviceURLS = new HashMap<>();
+
+        Cluster(String name, Map<String, List<String>> serviceURLs) {
+            this.name = name;
+            this.serviceURLS.putAll(serviceURLs);
+        }
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return serviceURLS.get(serviceName);
+        }
+    }
+
+}
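
Note: the discovery source this test extension reads is a plain properties file keyed as <cluster>.<SERVICE>=<url>, matching the split("\\.") parsing above; for example (values taken from PropertiesFileServiceDiscoveryTest earlier in this commit):

    mycluster.NAMENODE=hdfs://namenodehost:8020
    mycluster.WEBHCAT=http://webhcathost:50111/templeton
    mycluster.RESOURCEMANAGER=http://remanhost:8088/ws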

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
new file mode 100644
index 0000000..2cfd998
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class PropertiesFileServiceDiscoveryType implements ServiceDiscoveryType {
+
+    @Override
+    public String getType() {
+        return PropertiesFileServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new PropertiesFileServiceDiscovery();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
new file mode 100644
index 0000000..8f7b71a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Map;
+
+public class SneakyServiceDiscoveryImpl implements ServiceDiscovery {
+    @Override
+    public String getType() {
+        return "ActualType";
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return null;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return null;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
new file mode 100644
index 0000000..97665dc
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class SneakyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return "DeclaredType";
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new SneakyServiceDiscoveryImpl();
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
new file mode 100644
index 0000000..3dac66a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
+import java.util.*;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorFactoryTest {
+
+
+    @Test
+    public void testParseJSONSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "admin";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.json";
+        File testJSON = null;
+        try {
+            testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            if (testJSON != null) {
+                try {
+                    testJSON.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+    @Test
+    public void testParseYAMLSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "joeblow";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.yml";
+        File testYAML = null;
+        try {
+            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            if (testYAML != null) {
+                try {
+                    testYAML.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+
+    private void validateSimpleDescriptor(SimpleDescriptor    sd,
+                                          String              discoveryType,
+                                          String              discoveryAddress,
+                                          String              providerConfig,
+                                          String              clusterName,
+                                          Map<String, List<String>> expectedServices) {
+        assertNotNull(sd);
+        assertEquals(discoveryType, sd.getDiscoveryType());
+        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
+        assertEquals(providerConfig, sd.getProviderConfig());
+        assertEquals(clusterName, sd.getClusterName());
+
+        List<SimpleDescriptor.Service> actualServices = sd.getServices();
+
+        assertEquals(expectedServices.size(), actualServices.size());
+
+        for (SimpleDescriptor.Service actualService : actualServices) {
+            assertTrue(expectedServices.containsKey(actualService.getName()));
+            assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
+        }
+    }
+
+
+    private File writeJSON(String path, String content) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+    private File writeJSON(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("{" + "\n");
+        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
+        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
+        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
+        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
+        fw.write("\"cluster\":\"" + clusterName + "\",\n");
+        fw.write("\"services\":[\n");
+
+        int i = 0;
+        for (String name : services.keySet()) {
+            fw.write("{\"name\":\"" + name + "\"");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write(", \"urls\":[");
+                Iterator<String> urlIter = urls.iterator();
+                while (urlIter.hasNext()) {
+                    fw.write("\"" + urlIter.next() + "\"");
+                    if (urlIter.hasNext()) {
+                        fw.write(", ");
+                    }
+                }
+                fw.write("]");
+            }
+            fw.write("}");
+            if (i++ < services.size() - 1) {
+                fw.write(",");
+            }
+            fw.write("\n");
+        }
+        fw.write("]\n");
+        fw.write("}\n");
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+    private File writeYAML(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("---" + "\n");
+        fw.write("discovery-type: " + discoveryType + "\n");
+        fw.write("discovery-address: " + discoveryAddress + "\n");
+        fw.write("discovery-user: " + discoveryUser + "\n");
+        fw.write("provider-config-ref: " + providerConfig + "\n");
+        fw.write("cluster: " + clusterName+ "\n");
+        fw.write("services:\n");
+        for (String name : services.keySet()) {
+            fw.write("    - name: " + name + "\n");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write("      urls:\n");
+                for (String url : urls) {
+                    fw.write("          - " + url + "\n");
+                }
+            }
+        }
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
new file mode 100644
index 0000000..90c7146
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
+import org.apache.hadoop.gateway.util.XmlUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+import java.io.*;
+import java.util.*;
+
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorHandlerTest {
+
+    private static final String TEST_PROVIDER_CONFIG =
+            "    <gateway>\n" +
+                    "        <provider>\n" +
+                    "            <role>authentication</role>\n" +
+                    "            <name>ShiroProvider</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param>\n" +
+                    "                <!-- \n" +
+                    "                session timeout in minutes,  this is really idle timeout,\n" +
+                    "                defaults to 30mins, if the property value is not defined,, \n" +
+                    "                current client authentication would expire if client idles contiuosly for more than this value\n" +
+                    "                -->\n" +
+                    "                <name>sessionTimeout</name>\n" +
+                    "                <value>30</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm</name>\n" +
+                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapContextFactory</name>\n" +
+                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory</name>\n" +
+                    "                <value>$ldapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+                    "                <value>ldap://localhost:33389</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+                    "                <value>simple</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>urls./**</name>\n" +
+                    "                <value>authcBasic</value>\n" +
+                    "            </param>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <provider>\n" +
+                    "            <role>identity-assertion</role>\n" +
+                    "            <name>Default</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <!--\n" +
+                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+                    "        For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
+                    "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
+                    "        in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
+                    "\n" +
+                    "        If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
+                    "        enabled parameter as false.\n" +
+                    "\n" +
+                    "        The name parameter specifies the external host names in a comma separated list.\n" +
+                    "        The value parameter specifies corresponding internal host names in a comma separated list.\n" +
+                    "\n" +
+                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
+                    "        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
+                    "        -->\n" +
+                    "        <provider>\n" +
+                    "            <role>hostmap</role>\n" +
+                    "            <name>static</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+                    "        </provider>\n" +
+                    "    </gateway>\n";
+
+
+    /**
+     * KNOX-1006
+     *
+     * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+     *             org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+     */
+    @Test
+    public void testSimpleDescriptorHandler() throws Exception {
+
+        final String type = "DUMMY";
+        final String address = "http://c6401.ambari.apache.org:8080";
+        final String clusterName = "dummy";
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("JOBTRACKER", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("WEBHCAT", null);
+        serviceURLs.put("OOZIE", null);
+        serviceURLs.put("WEBHBASE", null);
+        serviceURLs.put("HIVE", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        // Write the externalized provider config to a temp file
+        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = (new File(".")).getCanonicalFile();
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                           SimpleDescriptorHandler.handle(testDescriptor,
+                                                          providerConfig.getParentFile(), // simple desc co-located with provider config
+                                                          destDir);
+            topologyFile = files.get("topology");
+
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // Validate the provider configuration
+            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            assertTrue("Resulting provider config should be identical to the referenced content.",
+                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+            // Validate the service declarations
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                        (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+                    assertNotNull("Every declared service should have a URL.", url);
+                    if (!topologyServiceURLs.containsKey(role)) {
+                        topologyServiceURLs.put(role, new ArrayList<String>());
+                    }
+                    topologyServiceURLs.get(role).add(url);
+                }
+            }
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            providerConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
+    private File writeProviderConfig(String path, String content) throws IOException {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..82a6f86
--- /dev/null
+++ b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,21 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType
+org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType
+org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
new file mode 100644
index 0000000..8223bea
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
@@ -0,0 +1,74 @@
+<gateway>
+    <provider>
+        <role>authentication</role>
+        <name>ShiroProvider</name>
+        <enabled>true</enabled>
+        <param>
+            <!--
+            Session timeout in minutes. This is really an idle timeout;
+            it defaults to 30 minutes if the property value is not defined.
+            The current client authentication will expire if the client idles continuously for longer than this value.
+            -->
+            <name>sessionTimeout</name>
+            <value>30</value>
+        </param>
+        <param>
+            <name>main.ldapRealm</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+        </param>
+        <param>
+            <name>main.ldapContextFactory</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory</name>
+            <value>$ldapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.userDnTemplate</name>
+            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.url</name>
+            <value>ldap://localhost:33389</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+            <value>simple</value>
+        </param>
+        <param>
+            <name>urls./**</name>
+            <value>authcBasic</value>
+        </param>
+    </provider>
+
+    <provider>
+        <role>identity-assertion</role>
+        <name>Default</name>
+        <enabled>true</enabled>
+    </provider>
+
+    <!--
+    Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+    For example, a Hadoop service running in AWS may return a response that includes URLs containing an
+    AWS-internal host name.  If the client needs to make a subsequent request to the host identified
+    in those URLs, they need to be mapped to external host names that the client can use to connect through Knox.
+
+    If the external and internal host names are the same, turn off this provider by setting the value of the
+    enabled parameter to false.
+
+    The name parameter specifies the external host names in a comma separated list.
+    The value parameter specifies corresponding internal host names in a comma separated list.
+
+    Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+    out-of-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
+    Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
+    -->
+    <provider>
+        <role>hostmap</role>
+        <name>static</name>
+        <enabled>true</enabled>
+        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+    </provider>
+
+</gateway>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
new file mode 100644
index 0000000..45407a7
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
@@ -0,0 +1,18 @@
+{
+  "discovery-type":"DUMMY",
+  "discovery-address":"http://c6401.ambari.apache.org:8080",
+  "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
+  "cluster":"dummy",
+  "services":[
+    {"name":"NAMENODE"},
+    {"name":"JOBTRACKER"},
+    {"name":"WEBHDFS"},
+    {"name":"WEBHCAT"},
+    {"name":"OOZIE"},
+    {"name":"WEBHBASE"},
+    {"name":"HIVE"},
+    {"name":"RESOURCEMANAGER"},
+    {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
+    {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
+  ]
+}
\ No newline at end of file
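
For comparison, the writeYAML test helper earlier in this patch implies an equivalent YAML form of this same
descriptor. As a minimal sketch (same field names, service list abbreviated), it would look like:

---
discovery-type: DUMMY
discovery-address: http://c6401.ambari.apache.org:8080
provider-config-ref: ../shared-providers/ambari-cluster-policy.xml
cluster: dummy
services:
    - name: NAMENODE
    - name: WEBHDFS
    - name: AMBARIUI
      urls:
          - http://c6401.ambari.apache.org:8080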

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
new file mode 100644
index 0000000..554ddbe
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import java.lang.annotation.*;
+
+/**
+ * This annotation can be used to inject gateway services into a ServiceDiscovery implementation.
+ */
+@Documented
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface GatewayService {
+
+}
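
As a usage illustration (not part of this patch), a ServiceDiscovery implementation can declare a field to be
injected by the discovery framework. The AliasService type and the example package below are assumptions made
for the sketch, not code from this change:

package org.apache.hadoop.gateway.topology.discovery.example;        // hypothetical package

import org.apache.hadoop.gateway.services.security.AliasService;     // assumed gateway service type
import org.apache.hadoop.gateway.topology.discovery.GatewayService;

public class ExampleDiscoveryDependencies {

    // Fields carrying @GatewayService are assigned the matching gateway service
    // instance by the framework before discovery is invoked.
    @GatewayService
    private AliasService aliasService;
}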

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
new file mode 100644
index 0000000..eefa30b
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * Implementations provide the means by which Hadoop service endpoint URLs are discovered from a source with knowledge
+ * about the service topology of one or more clusters.
+ */
+public interface ServiceDiscovery {
+
+    /**
+     * This is the type specified in a simple descriptor to indicate which ServiceDiscovery implementation to employ.
+     *
+     * @return The identifier for the service discovery type.
+     */
+    String getType();
+
+
+    /**
+     * Discover details of all the clusters known to the target registry.
+     *
+     * @param config The configuration for the discovery invocation
+     *
+     * @return A Map of the discovered service data, keyed by the cluster name.
+     */
+    Map<String, Cluster> discover(ServiceDiscoveryConfig config);
+
+
+    /**
+     * Discover details for a single cluster.
+     *
+     * @param config The configuration for the discovery invocation
+     * @param clusterName The name of a particular cluster
+     *
+     * @return The discovered service data for the specified cluster
+     */
+    Cluster discover(ServiceDiscoveryConfig config, String clusterName);
+
+
+    /**
+     * A handle to the service discovery result.
+     */
+    interface Cluster {
+
+        /**
+         * @return The name of the cluster
+         */
+        String getName();
+
+        /**
+         * @param serviceName The name of the service
+         * @return The URLs for the specified service in this cluster.
+         */
+        List<String> getServiceURLs(String serviceName);
+    }
+
+
+}
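
To make the contract above concrete, here is a minimal sketch of an implementation (not part of this patch) that
serves fixed URLs for a single hard-coded cluster; a real implementation would consult the source identified by
the ServiceDiscoveryConfig:

package org.apache.hadoop.gateway.topology.discovery.example;  // hypothetical package

import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;

public class StaticServiceDiscovery implements ServiceDiscovery {

    private static final String CLUSTER = "example";

    @Override
    public String getType() {
        // Matched against the discovery-type declared in a simple descriptor
        return "STATIC";
    }

    @Override
    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
        // Only one cluster is known to this trivial discovery source
        return Collections.singletonMap(CLUSTER, discover(config, CLUSTER));
    }

    @Override
    public Cluster discover(ServiceDiscoveryConfig config, final String clusterName) {
        return new Cluster() {
            @Override
            public String getName() {
                return clusterName;
            }

            @Override
            public List<String> getServiceURLs(String serviceName) {
                // A real implementation would derive these from config.getAddress()
                return Collections.singletonList("http://localhost:50070/webhdfs");
            }
        };
    }
}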

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
new file mode 100644
index 0000000..6b2e741
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery configuration details.
+ */
+public interface ServiceDiscoveryConfig {
+
+    /**
+     *
+     * @return The address of the discovery source.
+     */
+    String getAddress();
+
+    /**
+     *
+     * @return The username configured for interactions with the discovery source.
+     */
+    String getUser();
+
+    /**
+     *
+     * @return The alias for the password required for interactions with the discovery source.
+     */
+    String getPasswordAlias();
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
new file mode 100644
index 0000000..cddced1
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery extensions must implement this interface to add support for a new discovery source.
+ *
+ * The ServiceLoader mechanism is used to include ServiceDiscovery extensions, and implementations of this interface
+ * are the providers.
+ */
+public interface ServiceDiscoveryType {
+
+    /**
+     *
+     * @return The identifier for the discovery type.
+     */
+    String getType();
+
+
+    /**
+     *
+     * @return A new instance of the ServiceDiscovery implementation provided by this type.
+     */
+    ServiceDiscovery newInstance();
+
+}
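
A provider for the hypothetical StaticServiceDiscovery sketched earlier would then look like the following, and
would be registered by listing its fully-qualified class name in
META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType, as the test resource in this
patch does for the dummy discovery types:

package org.apache.hadoop.gateway.topology.discovery.example;  // hypothetical package

import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

public class StaticServiceDiscoveryType implements ServiceDiscoveryType {

    @Override
    public String getType() {
        return "STATIC";
    }

    @Override
    public ServiceDiscovery newInstance() {
        // ServiceLoader instantiates this type; the framework asks it for discovery instances
        return new StaticServiceDiscovery();
    }
}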

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d2f4176..2708f6b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -45,6 +45,7 @@
         <module>gateway-i18n-logging-log4j</module>
         <module>gateway-i18n-logging-sl4j</module>
         <module>gateway-spi</module>
+        <module>gateway-discovery-ambari</module>
         <module>gateway-server</module>
         <module>gateway-server-launcher</module>
         <module>gateway-server-xforwarded-filter</module>
@@ -684,6 +685,11 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
+                <artifactId>gateway-discovery-ambari</artifactId>
+                <version>${gateway-version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-release</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
@@ -702,17 +708,16 @@
                 <artifactId>gateway-shell-samples</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
-
-        <dependency>
-            <groupId>org.picketlink</groupId>
-            <artifactId>picketlink-federation</artifactId>
-            <version>2.7.0.CR3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.jboss.logging</groupId>
-            <artifactId>jboss-logging</artifactId>
-            <version>3.2.0.Final</version>
-        </dependency>
+            <dependency>
+                <groupId>org.picketlink</groupId>
+                <artifactId>picketlink-federation</artifactId>
+                <version>2.7.0.CR3</version>
+            </dependency>
+            <dependency>
+                <groupId>org.jboss.logging</groupId>
+                <artifactId>jboss-logging</artifactId>
+                <version>3.2.0.Final</version>
+            </dependency>
             <dependency>
                 <groupId>org.glassfish.jersey.containers</groupId>
                 <artifactId>jersey-container-servlet</artifactId>


[2/4] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..1e5e7b2
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/descriptors/README b/gateway-release/home/conf/descriptors/README
new file mode 100644
index 0000000..a2e5226
--- /dev/null
+++ b/gateway-release/home/conf/descriptors/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/shared-providers/README b/gateway-release/home/conf/shared-providers/README
new file mode 100644
index 0000000..44d12a3
--- /dev/null
+++ b/gateway-release/home/conf/shared-providers/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 83824cd..cbff307 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -334,6 +334,10 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-shell-samples</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-discovery-ambari</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 437d22d..5ebf793 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -248,6 +248,11 @@
            <artifactId>javax-websocket-client-impl</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>com.fasterxml.jackson.dataformat</groupId>
+            <artifactId>jackson-dataformat-yaml</artifactId>
+            <version>2.3.0</version>
+        </dependency>
 
         <!-- ********** ********** ********** ********** ********** ********** -->
         <!-- ********** Test Dependencies                           ********** -->

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 1f94584..6f73c1e 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -511,6 +511,11 @@ public interface GatewayMessages {
                " Could not find topology \"{0}\" mapped to port \"{1}\" configured in gateway-config.xml. "
                    + "This invalid topology mapping will be ignored by the gateway. "
                    + "Gateway restart will be required if in the future \"{0}\" topology is added.")
-  void topologyPortMappingCannotFindTopology(final String topology,
-      final int port);
+  void topologyPortMappingCannotFindTopology(final String topology, final int port);
+
+
+  @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
+  void simpleDescriptorHandlingError(final String simpleDesc,
+                                     @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
index cefada1..02ac154 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
@@ -105,14 +105,13 @@ public class DefaultGatewayServices implements GatewayServices {
     services.put( SERVER_INFO_SERVICE, sis );
 
     DefaultTopologyService tops = new DefaultTopologyService();
+    tops.setAliasService(alias);
     tops.init(  config, options  );
     services.put(  TOPOLOGY_SERVICE, tops  );
 
     DefaultServiceDefinitionRegistry sdr = new DefaultServiceDefinitionRegistry();
     sdr.init( config, options );
     services.put( SERVICE_DEFINITION_REGISTRY, sdr );
-    tops.init( config, options );
-    services.put( TOPOLOGY_SERVICE, tops );
 
     DefaultMetricsService metricsService = new DefaultMetricsService();
     metricsService.init( config, options );

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index 3321f3d..a493bc4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.gateway.config.GatewayConfig;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.service.definition.ServiceDefinition;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.TopologyService;
 import org.apache.hadoop.gateway.topology.Topology;
 import org.apache.hadoop.gateway.topology.TopologyEvent;
@@ -45,6 +46,7 @@ import org.apache.hadoop.gateway.topology.TopologyListener;
 import org.apache.hadoop.gateway.topology.TopologyMonitor;
 import org.apache.hadoop.gateway.topology.TopologyProvider;
 import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
+import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.gateway.topology.xml.AmbariFormatXmlTopologyRules;
 import org.apache.hadoop.gateway.topology.xml.KnoxFormatXmlTopologyRules;
@@ -74,20 +76,27 @@ import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
 public class DefaultTopologyService
     extends FileAlterationListenerAdaptor
     implements TopologyService, TopologyMonitor, TopologyProvider, FileFilter, FileAlterationListener {
+
   private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(
     AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
     AuditConstants.KNOX_COMPONENT_NAME);
+
   private static final List<String> SUPPORTED_TOPOLOGY_FILE_EXTENSIONS = new ArrayList<String>();
   static {
     SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("xml");
     SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("conf");
   }
+
   private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
   private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
-  private FileAlterationMonitor monitor;
-  private File directory;
+  private List<FileAlterationMonitor> monitors = new ArrayList<>();
+  private File topologiesDirectory;
+  private File descriptorsDirectory;
+
   private Set<TopologyListener> listeners;
   private volatile Map<File, Topology> topologies;
+  private AliasService aliasService;
+
 
   private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
     final long TIMEOUT = 250; //ms
@@ -202,29 +211,40 @@ public class DefaultTopologyService
   }
 
   private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
-
-    File topoDir = new File(config.getGatewayTopologyDir());
+    String normalizedTopologyDir = FilenameUtils.normalize(config.getGatewayTopologyDir());
+    File topoDir = new File(normalizedTopologyDir);
     topoDir = topoDir.getAbsoluteFile();
     return topoDir;
   }
 
-  private void initListener(FileAlterationMonitor monitor, File directory) {
-    this.directory = directory;
-    this.monitor = monitor;
+  private File calculateAbsoluteConfigDir(GatewayConfig config) {
+    File configDir = null;
 
+    String path = FilenameUtils.normalize(config.getGatewayConfDir());
+    if (path != null) {
+      configDir = new File(path);
+    } else {
+      configDir = (new File(config.getGatewayTopologyDir())).getParentFile();
+    }
+    configDir = configDir.getAbsoluteFile();
 
-    FileAlterationObserver observer = new FileAlterationObserver(this.directory, this);
-    observer.addListener(this);
-    monitor.addObserver(observer);
+    return configDir;
+  }
 
-    this.listeners = new HashSet<>();
-    this.topologies = new HashMap<>(); //loadTopologies( this.directory );
+  private void  initListener(FileAlterationMonitor  monitor,
+                            File                   directory,
+                            FileFilter             filter,
+                            FileAlterationListener listener) {
+    monitors.add(monitor);
+    FileAlterationObserver observer = new FileAlterationObserver(directory, filter);
+    observer.addListener(listener);
+    monitor.addObserver(observer);
   }
 
-  private void initListener(File directory) throws IOException, SAXException {
+  private void initListener(File directory, FileFilter filter, FileAlterationListener listener) throws IOException, SAXException {
     // Increasing the monitoring interval to 5 seconds as profiling has shown
     // this is rather expensive in terms of generated garbage objects.
-    initListener(new FileAlterationMonitor(5000L), directory);
+    initListener(new FileAlterationMonitor(5000L), directory, filter, listener);
   }
 
   private Map<File, Topology> loadTopologies(File directory) {
@@ -261,10 +281,14 @@ public class DefaultTopologyService
     return map;
   }
 
+  public void setAliasService(AliasService as) {
+    this.aliasService = as;
+  }
+
   public void deployTopology(Topology t){
 
     try {
-      File temp = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
+      File temp = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
       Package topologyPkg = Topology.class.getPackage();
       String pkgName = topologyPkg.getName();
       String bindingFile = pkgName.replace(".", "/") + "/topology_binding-xml.xml";
@@ -277,7 +301,7 @@ public class DefaultTopologyService
       mr.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
       mr.marshal(t, temp);
 
-      File topology = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml");
+      File topology = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml");
       if(!temp.renameTo(topology)) {
         FileUtils.forceDelete(temp);
         throw new IOException("Could not rename temp file");
@@ -317,7 +341,7 @@ public class DefaultTopologyService
     try {
       synchronized (this) {
         Map<File, Topology> oldTopologies = topologies;
-        Map<File, Topology> newTopologies = loadTopologies(directory);
+        Map<File, Topology> newTopologies = loadTopologies(topologiesDirectory);
         List<TopologyEvent> events = createChangeEvents(oldTopologies, newTopologies);
         topologies = newTopologies;
         notifyChangeListeners(events);
@@ -329,7 +353,7 @@ public class DefaultTopologyService
   }
 
   public void deleteTopology(Topology t) {
-    File topoDir = directory;
+    File topoDir = topologiesDirectory;
 
     if(topoDir.isDirectory() && topoDir.canRead()) {
       File[] results = topoDir.listFiles();
@@ -357,8 +381,8 @@ public class DefaultTopologyService
   public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
     File tFile = null;
     Map<String, List<String>> urls = new HashMap<>();
-    if(directory.isDirectory() && directory.canRead()) {
-      for(File f : directory.listFiles()){
+    if(topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
+      for(File f : topologiesDirectory.listFiles()){
         if(FilenameUtils.removeExtension(f.getName()).equals(t.getName())){
           tFile = f;
         }
@@ -387,12 +411,16 @@ public class DefaultTopologyService
 
   @Override
   public void startMonitor() throws Exception {
-    monitor.start();
+    for (FileAlterationMonitor monitor : monitors) {
+      monitor.start();
+    }
   }
 
   @Override
   public void stopMonitor() throws Exception {
-    monitor.stop();
+    for (FileAlterationMonitor monitor : monitors) {
+      monitor.stop();
+    }
   }
 
   @Override
@@ -414,6 +442,16 @@ public class DefaultTopologyService
 
   @Override
   public void onFileDelete(java.io.File file) {
+    // For full topology descriptors, we need to make sure to delete any corresponding simple descriptors to prevent
+    // unintended subsequent generation of the topology descriptor
+    for (String ext : DescriptorsMonitor.SUPPORTED_EXTENSIONS) {
+      File simpleDesc =
+              new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+      if (simpleDesc.exists()) {
+        simpleDesc.delete();
+      }
+    }
+
     onFileChange(file);
   }
 
@@ -436,12 +474,200 @@ public class DefaultTopologyService
   public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
 
     try {
-      initListener(calculateAbsoluteTopologiesDir(config));
-    } catch (IOException io) {
+      listeners = new HashSet<>();
+      topologies = new HashMap<>();
+
+      topologiesDirectory = calculateAbsoluteTopologiesDir(config);
+
+      File configDirectory = calculateAbsoluteConfigDir(config);
+      descriptorsDirectory = new File(configDirectory, "descriptors");
+      File sharedProvidersDirectory = new File(configDirectory, "shared-providers");
+
+      // Add support for conf/topologies
+      initListener(topologiesDirectory, this, this);
+
+      // Add support for conf/descriptors
+      DescriptorsMonitor dm = new DescriptorsMonitor(topologiesDirectory, aliasService);
+      initListener(descriptorsDirectory,
+                   dm,
+                   dm);
+
+      // Add support for conf/shared-providers
+      SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
+      initListener(sharedProvidersDirectory, spm, spm);
+
+    } catch (IOException | SAXException io) {
       throw new ServiceLifecycleException(io.getMessage());
-    } catch (SAXException sax) {
-      throw new ServiceLifecycleException(sax.getMessage());
+    }
+  }
+
+
+  /**
+   * Change handler for simple descriptors
+   */
+  public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
+                                          implements FileFilter {
+
+    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<String>();
+    static {
+      SUPPORTED_EXTENSIONS.add("json");
+      SUPPORTED_EXTENSIONS.add("yml");
+    }
+
+    private File topologiesDir;
+
+    private AliasService aliasService;
+
+    private Map<String, List<String>> providerConfigReferences = new HashMap<>();
+
+
+    public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
+      this.topologiesDir  = topologiesDir;
+      this.aliasService   = aliasService;
+    }
+
+    List<String> getReferencingDescriptors(String providerConfigPath) {
+      List<String> result = providerConfigReferences.get(providerConfigPath);
+      if (result == null) {
+        result = Collections.emptyList();
+      }
+      return result;
+    }
+
+    @Override
+    public void onFileCreate(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileDelete(File file) {
+      // For simple descriptors, we need to make sure to delete any corresponding full topology descriptors to trigger undeployment
+      for (String ext : DefaultTopologyService.SUPPORTED_TOPOLOGY_FILE_EXTENSIONS) {
+        File topologyFile =
+                new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+        if (topologyFile.exists()) {
+          topologyFile.delete();
+        }
+      }
+
+      String normalizedFilePath = FilenameUtils.normalize(file.getAbsolutePath());
+      String reference = null;
+      for (Map.Entry<String, List<String>> entry : providerConfigReferences.entrySet()) {
+        if (entry.getValue().contains(normalizedFilePath)) {
+          reference = entry.getKey();
+          break;
+        }
+      }
+      if (reference != null) {
+        providerConfigReferences.get(reference).remove(normalizedFilePath);
+      }
+    }
+
+    @Override
+    public void onFileChange(File file) {
+      try {
+        // When a simple descriptor has been created or modified, generate the new topology descriptor
+        Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
+
+        // Add the provider config reference relationship for handling updates to the provider config
+        String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
+        if (!providerConfigReferences.containsKey(providerConfig)) {
+          providerConfigReferences.put(providerConfig, new ArrayList<String>());
+        }
+        List<String> refs = providerConfigReferences.get(providerConfig);
+        String descriptorName = FilenameUtils.normalize(file.getAbsolutePath());
+        if (!refs.contains(descriptorName)) {
+          // Need to check if descriptor had previously referenced another provider config, so it can be removed
+          for (List<String> descs : providerConfigReferences.values()) {
+            if (descs.contains(descriptorName)) {
+              descs.remove(descriptorName);
+            }
+          }
+
+          // Add the current reference relationship
+          refs.add(descriptorName);
+        }
+      } catch (Exception e) {
+        log.simpleDescriptorHandlingError(file.getName(), e);
+      }
+    }
+
+    @Override
+    public boolean accept(File file) {
+      boolean accept = false;
+      if (!file.isDirectory() && file.canRead()) {
+        String extension = FilenameUtils.getExtension(file.getName());
+        if (SUPPORTED_EXTENSIONS.contains(extension)) {
+          accept = true;
+        }
+      }
+      return accept;
+    }
+  }
+
+  /**
+   * Change handler for shared provider configurations
+   */
+  public static class SharedProviderConfigMonitor extends FileAlterationListenerAdaptor
+          implements FileFilter {
+
+    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
+    static {
+      SUPPORTED_EXTENSIONS.add("xml");
     }
 
+    private DescriptorsMonitor descriptorsMonitor;
+    private File descriptorsDir;
+
+
+    SharedProviderConfigMonitor(DescriptorsMonitor descMonitor, File descriptorsDir) {
+      this.descriptorsMonitor = descMonitor;
+      this.descriptorsDir     = descriptorsDir;
+    }
+
+    @Override
+    public void onFileCreate(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileDelete(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileChange(File file) {
+      // For shared provider configuration, we need to update any simple descriptors that reference it
+      for (File descriptor : getReferencingDescriptors(file)) {
+        descriptor.setLastModified(System.currentTimeMillis());
+      }
+    }
+
+    private List<File> getReferencingDescriptors(File sharedProviderConfig) {
+      List<File> references = new ArrayList<>();
+
+      List<String> referencingDescriptors =
+              descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()));
+      for (File descriptor : descriptorsDir.listFiles()) {
+        if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
+          // Only add this descriptor if the monitor has recorded it as referencing the shared provider config;
+          // otherwise, every candidate file in the directory would re-add the same set of references.
+          if (referencingDescriptors.contains(FilenameUtils.normalize(descriptor.getAbsolutePath()))) {
+            references.add(descriptor);
+          }
+        }
+      }
+
+      return references;
+    }
+
+    @Override
+    public boolean accept(File file) {
+      boolean accept = false;
+      if (!file.isDirectory() && file.canRead()) {
+        String extension = FilenameUtils.getExtension(file.getName());
+        if (SUPPORTED_EXTENSIONS.contains(extension)) {
+          accept = true;
+        }
+      }
+      return accept;
+    }
   }
+
 }

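Taken together, the three listeners registered in init() give the gateway a file-driven workflow across sibling directories. A rough sketch of the monitored layout (the conf/ parent and the topologies directory name reflect the default gateway configuration; only the descriptors and shared-providers names are fixed by the code above):

    <gateway conf dir>/
        topologies/          full topology XML files; simple descriptors are generated into this directory
        descriptors/         simple descriptors (*.json, *.yml), watched by DescriptorsMonitor
        shared-providers/    shared provider configurations (*.xml), watched by SharedProviderConfigMonitor

Dropping a descriptor such as sandbox.json (name illustrative) into descriptors/ causes DescriptorsMonitor to invoke SimpleDescriptorHandler and produce a corresponding topology file in the topologies directory. Deleting that descriptor deletes the generated topology, and modifying a shared provider configuration touches every descriptor that references it, so the affected topologies are regenerated.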
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index b5e80d2..a30cf13 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@ -77,7 +77,7 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
         Topology topology = new Topology();
         topology.setName(name);
 
-          for (Provider provider : providers) {
+        for (Provider provider : providers) {
             topology.addProvider(provider);
         }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
new file mode 100644
index 0000000..6534b5e
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+public class DefaultServiceDiscoveryConfig implements ServiceDiscoveryConfig {
+    private String address  = null;
+    private String user     = null;
+    private String pwdAlias = null;
+
+    public DefaultServiceDiscoveryConfig(String address) {
+        this.address = address;
+    }
+
+    public void setUser(String username) {
+        this.user = username;
+    }
+
+    public void setPasswordAlias(String alias) {
+        this.pwdAlias = alias;
+    }
+
+    public String getAddress() {
+        return address;
+    }
+
+    public String getUser() {
+        return user;
+    }
+
+    public String getPasswordAlias() {
+        return pwdAlias;
+    }
+
+}

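For reference, a minimal usage sketch of this config type; the address, user, and alias values are illustrative only:

    DefaultServiceDiscoveryConfig config = new DefaultServiceDiscoveryConfig("http://c6401.ambari.apache.org:8080");
    config.setUser("admin");
    config.setPasswordAlias("ambari.discovery.password");
    // Consumers read these values back through the ServiceDiscoveryConfig interface
    String address  = config.getAddress();        // "http://c6401.ambari.apache.org:8080"
    String pwdAlias = config.getPasswordAlias();  // "ambari.discovery.password"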
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
new file mode 100644
index 0000000..70d5f61
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.Service;
+
+import java.lang.reflect.Field;
+import java.util.ServiceLoader;
+
+/**
+ * Creates instances of ServiceDiscovery implementations.
+ *
+ * This factory uses the ServiceLoader mechanism to load ServiceDiscovery implementations as extensions.
+ *
+ */
+public abstract class ServiceDiscoveryFactory {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+
+    public static ServiceDiscovery get(String type) {
+        return get(type, NO_GATEWAY_SERVICES);
+    }
+
+
+    public static ServiceDiscovery get(String type, Service...gatewayServices) {
+        ServiceDiscovery sd  = null;
+
+        // Look up the available ServiceDiscovery types
+        ServiceLoader<ServiceDiscoveryType> loader = ServiceLoader.load(ServiceDiscoveryType.class);
+        for (ServiceDiscoveryType sdt : loader) {
+            if (sdt.getType().equalsIgnoreCase(type)) {
+                try {
+                    ServiceDiscovery instance = sdt.newInstance();
+                    // Make sure the type reported by the instance matches the type declared by the factory
+                    // (is this necessary?)
+                    if (instance.getType().equalsIgnoreCase(type)) {
+                        sd = instance;
+
+                        // Inject any gateway services that were specified, and which are referenced in the impl
+                        if (gatewayServices != null && gatewayServices.length > 0) {
+                            for (Field field : sd.getClass().getDeclaredFields()) {
+                                if (field.getDeclaredAnnotation(GatewayService.class) != null) {
+                                    for (Service s : gatewayServices) {
+                                        if (s != null) {
+                                            if (field.getType().isAssignableFrom(s.getClass())) {
+                                                field.setAccessible(true);
+                                                field.set(sd, s);
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        break;
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return sd;
+    }
+
+
+}

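Because the factory loads implementations with java.util.ServiceLoader, an extension module only needs to implement ServiceDiscoveryType and list its implementation class in a META-INF/services resource named org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType. The following is a hedged sketch: the package, class names, and type string are hypothetical, and getType() plus newInstance() are the only methods the factory above relies on.

    package org.example.discovery;

    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    public class MyServiceDiscoveryType implements ServiceDiscoveryType {
        @Override
        public String getType() {
            // Matched case-insensitively against the type passed to ServiceDiscoveryFactory.get()
            return "MY_DISCOVERY";
        }

        @Override
        public ServiceDiscovery newInstance() {
            return new MyServiceDiscovery(); // hypothetical ServiceDiscovery implementation
        }
    }

The services file contains just the fully-qualified implementation class name (e.g. org.example.discovery.MyServiceDiscoveryType); a caller would then obtain the instance with ServiceDiscoveryFactory.get("MY_DISCOVERY"), optionally passing gateway services to be injected into any @GatewayService-annotated fields.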
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
new file mode 100644
index 0000000..aa28469
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import java.util.List;
+
+public interface SimpleDescriptor {
+
+    String getName();
+
+    String getDiscoveryType();
+
+    String getDiscoveryAddress();
+
+    String getDiscoveryUser();
+
+    String getDiscoveryPasswordAlias();
+
+    String getClusterName();
+
+    String getProviderConfig();
+
+    List<Service> getServices();
+
+
+    interface Service {
+        String getName();
+
+        List<String> getURLs();
+    }
+
+}

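For context, a simple descriptor is a small JSON (or YAML) file whose contents back the accessors above; the descriptor name itself is taken from the file name. The property names in the following sketch are assumptions, since the actual JSON/YAML binding is defined by SimpleDescriptorImpl, which is not included in this hunk:

    {
      "discovery-type"      : "AMBARI",
      "discovery-address"   : "http://c6401.ambari.apache.org:8080",
      "discovery-user"      : "maria_dev",
      "discovery-pwd-alias" : "ambari.discovery.password",
      "provider-config-ref" : "sandbox-providers",
      "cluster"             : "Sandbox",
      "services" : [
        { "name" : "NAMENODE" },
        { "name" : "WEBHDFS" },
        { "name" : "RESOURCEMANAGER" }
      ]
    }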
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
new file mode 100644
index 0000000..3df6d2f
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+
+public class SimpleDescriptorFactory {
+
+    /**
+     * Create a SimpleDescriptor from the specified file.
+     *
+     * @param path The path to the file.
+     * @return A SimpleDescriptor based on the contents of the file.
+     *
+     * @throws IOException if an error occurs while reading or parsing the descriptor file.
+     */
+    public static SimpleDescriptor parse(String path) throws IOException {
+        SimpleDescriptor sd;
+
+        if (path.endsWith(".json")) {
+            sd = parseJSON(path);
+        } else if (path.endsWith(".yml")) {
+            sd = parseYAML(path);
+        } else {
+           throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
+        }
+
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseJSON(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper();
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseYAML(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+}

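A minimal usage sketch for this factory (the path is illustrative; the format is selected purely by file extension, so .json and .yml descriptors are handled by the same call):

    // Throws IOException if the file cannot be read or parsed
    SimpleDescriptor descriptor = SimpleDescriptorFactory.parse("/etc/knox/conf/descriptors/sandbox.json");
    String cluster = descriptor.getClusterName();
    for (SimpleDescriptor.Service service : descriptor.getServices()) {
        System.out.println(service.getName() + " : " + service.getURLs());
    }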

[4/4] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c2ca4432
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c2ca4432
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c2ca4432

Branch: refs/heads/master
Commit: c2ca443262a848ae0d56e03e92ecba32fbf149f2
Parents: 3a0119b
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sat Sep 23 11:04:39 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sat Sep 23 11:04:39 2017 -0400

----------------------------------------------------------------------
 b/gateway-discovery-ambari/pom.xml              |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 b/gateway-release/home/conf/descriptors/README  |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-discovery-ambari/pom.xml                |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 gateway-release/home/conf/descriptors/README    |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-release/pom.xml                         |   4 +
 gateway-server/pom.xml                          |   5 +
 .../apache/hadoop/gateway/GatewayMessages.java  |   9 +-
 .../services/DefaultGatewayServices.java        |   3 +-
 .../topology/impl/DefaultTopologyService.java   | 278 +++++-
 .../builder/BeanPropertyTopologyBuilder.java    |   2 +-
 .../DefaultServiceDiscoveryConfig.java          |  48 ++
 .../discovery/ServiceDiscoveryFactory.java      |  81 ++
 .../topology/simple/SimpleDescriptor.java       |  46 +
 .../simple/SimpleDescriptorFactory.java         |  71 ++
 .../simple/SimpleDescriptorHandler.java         | 186 ++++
 .../topology/simple/SimpleDescriptorImpl.java   | 111 +++
 .../simple/SimpleDescriptorMessages.java        |  44 +
 .../topology/DefaultTopologyServiceTest.java    |  70 +-
 .../PropertiesFileServiceDiscoveryTest.java     |  90 ++
 .../discovery/ServiceDiscoveryFactoryTest.java  |  81 ++
 .../test/extension/DummyServiceDiscovery.java   |  66 ++
 .../extension/DummyServiceDiscoveryType.java    |  32 +
 .../PropertiesFileServiceDiscovery.java         | 108 +++
 .../PropertiesFileServiceDiscoveryType.java     |  35 +
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 +
 .../extension/SneakyServiceDiscoveryType.java   |  33 +
 .../simple/SimpleDescriptorFactoryTest.java     | 218 +++++
 .../simple/SimpleDescriptorHandlerTest.java     | 239 ++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  21 +
 .../topology/file/ambari-cluster-policy.xml     |  74 ++
 .../topology/file/simple-topology-four.json     |  18 +
 .../topology/discovery/GatewayService.java      |  29 +
 .../topology/discovery/ServiceDiscovery.java    |  76 ++
 .../discovery/ServiceDiscoveryConfig.java       |  42 +
 .../discovery/ServiceDiscoveryType.java         |  40 +
 pom.xml                                         |  27 +-
 54 files changed, 5626 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/pom.xml b/b/gateway-discovery-ambari/pom.xml
new file mode 100644
index 0000000..924e89c
--- /dev/null
+++ b/b/gateway-discovery-ambari/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.14.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-discovery-ambari</artifactId>
+
+    <name>gateway-discovery-ambari</name>
+    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..6eaabd3
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.keySet().contains(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}
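
An illustrative same-package sketch (not part of this commit) that exercises the package-private AmbariCluster model above directly; the hdfs-site property name matches what AmbariServiceURLCreator reads when it assembles the WEBHDFS URL, while the host and version values are hypothetical:

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Must reside in org.apache.hadoop.gateway.topology.discovery.ambari, since
    // AmbariCluster and its nested ServiceConfiguration are package-private.
    public class AmbariClusterSketch {
        public static void main(String[] args) {
            Map<String, String> hdfsSite = new HashMap<>();
            hdfsSite.put("dfs.namenode.http-address", "c6401.ambari.apache.org:50070");

            AmbariCluster cluster = new AmbariCluster("testCluster");
            cluster.addServiceConfiguration("HDFS", "hdfs-site",
                    new AmbariCluster.ServiceConfiguration("hdfs-site", "1", hdfsSite));

            // getServiceURLs() delegates to AmbariServiceURLCreator, which builds
            // http://<dfs.namenode.http-address>/webhdfs for the WEBHDFS service.
            List<String> urls = cluster.getServiceURLs("WEBHDFS");
            System.out.println(urls);  // [http://c6401.ambari.apache.org:50070/webhdfs]
        }
    }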

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..55257fb
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..34f20a7
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which the endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // Parse the cluster names from the response, and perform the cluster discovery
+        JSONArray clusterItems = (JSONArray) json.get("items");
+        for (Object clusterItem : clusterItems) {
+            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+            try {
+                Cluster c = discover(config, clusterName);
+                clusters.put(clusterName, c);
+            } catch (Exception e) {
+                log.clusterDiscoveryError(clusterName, e);
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+//                    String hostName = (String) hostRoles.get("host_name");
+                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<String, String>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null) {
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                AmbariComponent c = new AmbariComponent(componentName,
+                                                        svcConfig.getVersion(),
+                                                        clusterName,
+                                                        serviceName,
+                                                        hostNames,
+                                                        svcConfig.getProperties());
+                cluster.addComponent(c);
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no username is configured, then use the default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
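
For reference, the basic call pattern for the discovery implementation above (an illustrative sketch, not part of this commit, mirroring the mocking approach used by the test later in this message). Note that the AliasService field is injected by the gateway via the @GatewayService annotation; run standalone as shown, no ambari.discovery.user / ambari.discovery.password aliases can be resolved, so invokeREST would raise its ConfigurationException:

    import java.util.Map;

    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
    import org.easymock.EasyMock;

    // Same-package placement assumed, since AmbariServiceDiscovery is package-private.
    public class AmbariDiscoverySketch {
        public static void main(String[] args) {
            ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
            EasyMock.expect(sdc.getAddress()).andReturn("http://ambarihost:8080").anyTimes();
            EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();          // fall back to ambari.discovery.user
            EasyMock.expect(sdc.getPasswordAlias()).andReturn(null).anyTimes(); // fall back to ambari.discovery.password
            EasyMock.replay(sdc);

            ServiceDiscovery discovery = new AmbariServiceDiscovery();
            Map<String, ServiceDiscovery.Cluster> clusters = discovery.discover(sdc);
            for (ServiceDiscovery.Cluster cluster : clusters.values()) {
                System.out.println(cluster.getName() + " -> " + cluster.getServiceURLs("WEBHDFS"));
            }
        }
    }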

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..caa16ed
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..723a786
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..0674642
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return One or more endpoint URLs for the specified service.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            String scheme = null; // What is the scheme for the binary transport mode?
+            if ("http".equals(transport)) {
+                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
+            }
+
+            result.add(scheme + "://" + host + ":" + port + "/" + path);
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
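
An illustrative same-package sketch (not part of this commit) of the RESOURCEMANAGER case above, showing how yarn.http.policy selects the scheme; the version, service name, and host values are hypothetical:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ResourceManagerUrlSketch {
        public static void main(String[] args) {
            Map<String, String> yarnSite = new HashMap<>();
            yarnSite.put("yarn.resourcemanager.webapp.address", "c6402.ambari.apache.org:8088");
            yarnSite.put("yarn.http.policy", "HTTP_ONLY");

            AmbariCluster cluster = new AmbariCluster("testCluster");
            cluster.addComponent(new AmbariComponent("RESOURCEMANAGER", "2.7.3", "testCluster", "YARN",
                                                     Collections.singletonList("c6402.ambari.apache.org"),
                                                     yarnSite));

            // HTTPS_ONLY would yield an https URL; any other policy value yields http.
            List<String> urls = new AmbariServiceURLCreator().create(cluster, "RESOURCEMANAGER");
            System.out.println(urls);  // [http://c6402.ambari.apache.org:8088/ws]
        }
    }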

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..1da4fc9
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
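
The registration above is a standard java.util.ServiceLoader provider file. A sketch (not part of this commit, and not necessarily how ServiceDiscoveryFactory performs the lookup) of resolving the Ambari implementation by its type name:

    import java.util.ServiceLoader;

    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    public class DiscoveryTypeLookupSketch {
        public static void main(String[] args) {
            ServiceDiscovery ambari = null;
            for (ServiceDiscoveryType type : ServiceLoader.load(ServiceDiscoveryType.class)) {
                if ("AMBARI".equals(type.getType())) {   // AmbariServiceDiscovery.TYPE
                    ambari = type.newInstance();
                    break;
                }
            }
            System.out.println(ambari != null ? "Resolved AMBARI discovery type" : "Not found");
        }
    }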


[3/4] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..1e5e7b2
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}
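
The templates above are only useful once the TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER token has been substituted. Below is a minimal sketch of that usage, assuming only the json-smart classes the discovery module already imports; the literal "CLUSTER_PLACEHOLDER" string, the cluster name, and the class name are illustrative stand-ins, not part of the patch:

    import net.minidev.json.JSONArray;
    import net.minidev.json.JSONObject;
    import net.minidev.json.JSONValue;

    // Illustrative only: substitute the placeholder as a mock Ambari endpoint would,
    // then parse the body the same way the discovery code parses real responses.
    public class MockResponseSketch {
        public static void main(String[] args) {
            String template = "{ \"items\" : [ { \"ServiceInfo\" : "
                            + "{ \"cluster_name\" : \"CLUSTER_PLACEHOLDER\", \"service_name\" : \"HDFS\" } } ] }";
            String body = template.replace("CLUSTER_PLACEHOLDER", "test-cluster");

            JSONObject json = (JSONObject) JSONValue.parse(body);
            JSONArray items = (JSONArray) json.get("items");
            for (Object item : items) {
                JSONObject serviceInfo = (JSONObject) ((JSONObject) item).get("ServiceInfo");
                System.out.println(serviceInfo.get("service_name"));   // prints HDFS
            }
        }
    }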

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/descriptors/README b/b/gateway-release/home/conf/descriptors/README
new file mode 100644
index 0000000..a2e5226
--- /dev/null
+++ b/b/gateway-release/home/conf/descriptors/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/shared-providers/README b/b/gateway-release/home/conf/shared-providers/README
new file mode 100644
index 0000000..44d12a3
--- /dev/null
+++ b/b/gateway-release/home/conf/shared-providers/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/pom.xml b/gateway-discovery-ambari/pom.xml
new file mode 100644
index 0000000..924e89c
--- /dev/null
+++ b/gateway-discovery-ambari/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.14.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-discovery-ambari</artifactId>
+
+    <name>gateway-discovery-ambari</name>
+    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..6eaabd3
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.containsKey(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}
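
AmbariCluster and its nested ServiceConfiguration are package-private, so they can only be exercised from inside org.apache.hadoop.gateway.topology.discovery.ambari. A minimal sketch of populating and querying the model, assuming only the API shown above; the cluster name and property values are illustrative:

    package org.apache.hadoop.gateway.topology.discovery.ambari;

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: build a small AmbariCluster model and read a config value back.
    class AmbariClusterSketch {
        static void sketch() {
            AmbariCluster cluster = new AmbariCluster("test-cluster");

            Map<String, String> props = new HashMap<>();
            props.put("hive.server2.thrift.http.port", "10001");
            cluster.addServiceConfiguration("HIVE", "hive-site",
                new AmbariCluster.ServiceConfiguration("hive-site", "2", props));

            AmbariCluster.ServiceConfiguration hiveSite =
                cluster.getServiceConfiguration("HIVE", "hive-site");
            System.out.println(cluster.getName() + " -> "
                + hiveSite.getProperties().get("hive.server2.thrift.http.port"));
        }
    }

Note that getServiceURLs(String) delegates to AmbariServiceURLCreator, which is not shown in this excerpt, so the sketch sticks to the configuration accessors.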

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..55257fb
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}
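
A similar sketch for AmbariComponent, again from within the same package because the class is package-private; the discovery code builds these from the HostRoles and service-configuration responses, and all values below are illustrative:

    package org.apache.hadoop.gateway.topology.discovery.ambari;

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: construct a component as the discovery code would and read a property.
    class AmbariComponentSketch {
        static void sketch() {
            Map<String, String> props = new HashMap<>();
            props.put("dfs.namenode.http-address", "c6401.ambari.apache.org:50070");

            AmbariComponent nameNode = new AmbariComponent(
                "NAMENODE",                                // component name
                "2",                                       // configuration version
                "test-cluster",                            // cluster
                "HDFS",                                    // service
                Arrays.asList("c6401.ambari.apache.org"),  // host names
                props);

            System.out.println(nameNode.getServiceName() + " @ "
                + nameNode.getConfigProperty("dfs.namenode.http-address"));
        }
    }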

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..34f20a7
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // Parse the cluster names from the response, and perform the cluster discovery
+        JSONArray clusterItems = (JSONArray) json.get("items");
+        for (Object clusterItem : clusterItems) {
+            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+            try {
+                Cluster c = discover(config, clusterName);
+                clusters.put(clusterName, c);
+            } catch (Exception e) {
+                log.clusterDiscoveryError(clusterName, e);
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+//                    String hostName = (String) hostRoles.get("host_name");
+                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<String, String>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(configProperties.get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null && configs != null) {
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                // Guard against services for which no configuration of this type was discovered
+                if (svcConfig != null) {
+                    AmbariComponent c = new AmbariComponent(componentName,
+                                                            svcConfig.getVersion(),
+                                                            clusterName,
+                                                            serviceName,
+                                                            hostNames,
+                                                            svcConfig.getProperties());
+                    cluster.addComponent(c);
+                }
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no username is configured, then try the default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse(EntityUtils.toString(entity));
+                    if (result != null) {
+                        log.debugJSON(result.toJSONString());
+                    }
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
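
For reference, the host-roles and service-configuration parsing above assumes a particular
shape for the Ambari REST responses. The sketch below shows that assumed shape for the
serviceConfigsURL request, assuming the json-smart (net.minidev.json) JSON types; the host
name and property values are hypothetical examples, not taken from this patch.

    import net.minidev.json.JSONObject;
    import net.minidev.json.JSONValue;

    public class AmbariServiceConfigsShapeExample {
        public static void main(String[] args) {
            // Approximate response shape expected by the parsing loop above:
            // items[].service_name, items[].configurations[].type/version/properties
            String sample =
                  "{ \"items\": [ {"
                + "    \"service_name\": \"HDFS\","
                + "    \"configurations\": [ {"
                + "        \"type\": \"hdfs-site\","
                + "        \"version\": 1,"
                + "        \"properties\": { \"dfs.namenode.http-address\": \"namenode.example.com:50070\" }"
                + "    } ]"
                + "} ] }";

            JSONObject parsed = (JSONObject) JSONValue.parse(sample);
            System.out.println(parsed.toJSONString());   // round-trips the parsed structure
        }
    }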

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..caa16ed
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+
+
+
+}
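
These message methods are not backed by a hand-written logger; following the gateway's
annotation-driven i18n logging pattern, an implementation of this interface is obtained
from MessagesFactory, which is how the discovery code above gets its 'log' reference.
A minimal usage sketch, with hypothetical argument values:

    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
    import org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryMessages;

    public class AmbariDiscoveryLoggingExample {
        // An implementation of the annotated interface is obtained from the factory.
        private static final AmbariServiceDiscoveryMessages log =
                MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

        public static void main(String[] args) {
            // Hypothetical values; this simply exercises one of the message methods.
            log.discoveredServiceHost("HDFS", "namenode.example.com");
        }
    }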

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..723a786
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}
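
This type is registered through the META-INF/services entry added at the end of this patch,
which makes it discoverable via the standard java.util.ServiceLoader mechanism. A minimal
lookup sketch follows; the lookup code and the "AMBARI" type name are illustrative
assumptions, not taken from this patch.

    import java.util.ServiceLoader;

    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    public class DiscoveryTypeLookupExample {
        public static void main(String[] args) {
            // Iterate the registered ServiceDiscoveryType implementations and instantiate
            // the one whose type name matches the requested discovery type.
            for (ServiceDiscoveryType type : ServiceLoader.load(ServiceDiscoveryType.class)) {
                if ("AMBARI".equals(type.getType())) { // assumed value of AmbariServiceDiscovery.TYPE
                    ServiceDiscovery discovery = type.newInstance();
                    System.out.println("Loaded discovery implementation: " + discovery.getClass().getName());
                }
            }
        }
    }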

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..0674642
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return A list of endpoint URLs for the specified service, or null if the service is not a supported type.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            // Only the HTTP transport mode can be proxied by the gateway; skip URL creation for binary mode
+            if ("http".equals(transport)) {
+                String scheme = Boolean.parseBoolean(useSSL) ? "https" : "http";
+                result.add(scheme + "://" + host + ":" + port + "/" + path);
+            }
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
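
Because AmbariServiceURLCreator is package-private, its callers live in the same package.
A minimal usage sketch is shown below; the cluster instance is assumed to have been
populated by AmbariServiceDiscovery, and the URL in the comment is only a hypothetical
example of what the WEBHDFS mapping above would produce.

    package org.apache.hadoop.gateway.topology.discovery.ambari;

    import java.util.List;

    class ServiceURLCreationExample {

        // 'cluster' is assumed to have been returned by a prior AmbariServiceDiscovery run.
        static List<String> webHdfsURLs(AmbariCluster cluster) {
            AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
            // Resolves hdfs-site's dfs.namenode.http-address and yields something like
            // "http://namenode.example.com:50070/webhdfs" (host and port are hypothetical).
            return urlCreator.create(cluster, "WEBHDFS");
        }
    }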

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..1da4fc9
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file