Posted to commits@knox.apache.org by mo...@apache.org on 2017/09/25 20:20:59 UTC

[01/22] knox git commit: KNOX-1054 - Make DefaultURLManager thread-safe (Phil Zampino via Sandeep More)

Repository: knox
Updated Branches:
  refs/heads/KNOX-998-Package_Restructuring 416ee7c15 -> 668aea180


KNOX-1054 - Make DefaultURLManager thread-safe (Phil Zampino via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/d3f507f9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/d3f507f9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/d3f507f9

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: d3f507f9424b7deb355696c1c6d5f5aad142157d
Parents: 2666894
Author: Sandeep More <mo...@apache.org>
Authored: Wed Sep 20 21:47:35 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Sep 20 21:47:35 2017 -0400

----------------------------------------------------------------------
 .../hadoop/gateway/ha/provider/impl/DefaultURLManager.java  | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/d3f507f9/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
index 484a580..e76be7d 100644
--- a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.gateway.ha.provider.impl.i18n.HaMessages;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 
 import java.net.URI;
-import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
@@ -34,8 +33,6 @@ public class DefaultURLManager implements URLManager {
 
   private ConcurrentLinkedQueue<String> urls = new ConcurrentLinkedQueue<>();
 
-  public DefaultURLManager() {
-  }
 
   @Override
   public boolean supportsConfig(HaServiceConfig config) {
@@ -48,7 +45,7 @@ public class DefaultURLManager implements URLManager {
   }
 
   @Override
-  public String getActiveURL() {
+  public synchronized String getActiveURL() {
     return urls.peek();
   }
 
@@ -68,13 +65,13 @@ public class DefaultURLManager implements URLManager {
   }
 
   @Override
-  public List<String> getURLs() {
+  public synchronized List<String> getURLs() {
     return Lists.newArrayList(urls.iterator());
   }
 
   @Override
   public synchronized void setURLs(List<String> urls) {
-    if ( urls != null && !urls.isEmpty()) {
+    if (urls != null && !urls.isEmpty()) {
       this.urls.clear();
       this.urls.addAll(urls);
     }
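
A ConcurrentLinkedQueue already makes each individual peek or poll thread-safe on
its own, so the synchronized modifiers added above matter for compound operations:
a fail-over that rotates the active URL must not interleave with getActiveURL() or
getURLs(). As a rough sketch (not the committed code) of the kind of compound
mutation these accessors now serialize against, a markFailed-style rotation on the
same monitor might look like this:

    // Illustrative sketch only; assumes the urls field shown above.
    public synchronized void markFailed(String url) {
      String top = urls.peek();
      if (top != null && top.equalsIgnoreCase(url)) {
        // Move the failed URL to the back of the queue. Holding the same
        // monitor as getActiveURL()/getURLs() means no reader can observe
        // the queue between the poll and the offer.
        urls.poll();
        urls.offer(top);
      }
    }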


[18/22] knox git commit: KNOX-998 - Merging from current master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
new file mode 100644
index 0000000..f57a1d8
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.test.extension;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Map;
+
+public class SneakyServiceDiscoveryImpl implements ServiceDiscovery {
+    @Override
+    public String getType() {
+        return "ActualType";
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return null;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return null;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
new file mode 100644
index 0000000..e496819
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.test.extension;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class SneakyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return "DeclaredType";
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new SneakyServiceDiscoveryImpl();
+    }
+}
+
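
These two test fixtures deliberately disagree: the type declares "DeclaredType"
while the instance it creates reports "ActualType", apparently so a test can verify
which of the two identifiers the ServiceLoader-based lookup keys on. A hedged
sketch of such a lookup (the factory class below is illustrative, not Knox's
actual code):

    import java.util.ServiceLoader;
    import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;

    public class ServiceDiscoveryLookup {
        public static ServiceDiscovery create(String type) {
            // ServiceLoader reads the META-INF/services registration file and
            // instantiates each listed ServiceDiscoveryType provider.
            for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
                // Matching on the declared type: "DeclaredType" would resolve to
                // the Sneaky fixture even though its instance reports "ActualType".
                if (sdt.getType().equalsIgnoreCase(type)) {
                    return sdt.newInstance();
                }
            }
            return null;
        }
    }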

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
new file mode 100644
index 0000000..efee1d8
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
+import java.util.*;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorFactoryTest {
+
+
+    @Test
+    public void testParseJSONSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "admin";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.json";
+        File testJSON = null;
+        try {
+            testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            if (testJSON != null) {
+                try {
+                    testJSON.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+    @Test
+    public void testParseYAMLSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "joeblow";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.yml";
+        File testYAML = null;
+        try {
+            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            if (testYAML != null) {
+                try {
+                    testYAML.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+
+    private void validateSimpleDescriptor(SimpleDescriptor    sd,
+                                          String              discoveryType,
+                                          String              discoveryAddress,
+                                          String              providerConfig,
+                                          String              clusterName,
+                                          Map<String, List<String>> expectedServices) {
+        assertNotNull(sd);
+        assertEquals(discoveryType, sd.getDiscoveryType());
+        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
+        assertEquals(providerConfig, sd.getProviderConfig());
+        assertEquals(clusterName, sd.getClusterName());
+
+        List<SimpleDescriptor.Service> actualServices = sd.getServices();
+
+        assertEquals(expectedServices.size(), actualServices.size());
+
+        for (SimpleDescriptor.Service actualService : actualServices) {
+            assertTrue(expectedServices.containsKey(actualService.getName()));
+            assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
+        }
+    }
+
+
+    private File writeJSON(String path, String content) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+    private File writeJSON(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("{" + "\n");
+        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
+        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
+        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
+        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
+        fw.write("\"cluster\":\"" + clusterName + "\",\n");
+        fw.write("\"services\":[\n");
+
+        int i = 0;
+        for (String name : services.keySet()) {
+            fw.write("{\"name\":\"" + name + "\"");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write(", \"urls\":[");
+                Iterator<String> urlIter = urls.iterator();
+                while (urlIter.hasNext()) {
+                    fw.write("\"" + urlIter.next() + "\"");
+                    if (urlIter.hasNext()) {
+                        fw.write(", ");
+                    }
+                }
+                fw.write("]");
+            }
+            fw.write("}");
+            if (i++ < services.size() - 1) {
+                fw.write(",");
+            }
+            fw.write("\n");
+        }
+        fw.write("]\n");
+        fw.write("}\n");
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+    private File writeYAML(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("---" + "\n");
+        fw.write("discovery-type: " + discoveryType + "\n");
+        fw.write("discovery-address: " + discoveryAddress + "\n");
+        fw.write("discovery-user: " + discoveryUser + "\n");
+        fw.write("provider-config-ref: " + providerConfig + "\n");
+        fw.write("cluster: " + clusterName+ "\n");
+        fw.write("services:\n");
+        for (String name : services.keySet()) {
+            fw.write("    - name: " + name + "\n");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write("      urls:\n");
+                for (String url : urls) {
+                    fw.write("          - " + url + "\n");
+                }
+            }
+        }
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+}
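
For reference, the writeYAML helper above emits a descriptor of roughly this shape
for the test's inputs (abbreviated here to two services):

    ---
    discovery-type: AMBARI
    discovery-address: http://c6401.ambari.apache.org:8080
    discovery-user: joeblow
    provider-config-ref: ambari-cluster-policy.xml
    cluster: myCluster
    services:
        - name: NODEMANAGER
        - name: HIVE
          urls:
              - http://c6401.ambari.apache.org
              - http://c6402.ambari.apache.org
              - http://c6403.ambari.apache.org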

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
new file mode 100644
index 0000000..b713491
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import org.apache.knox.gateway.topology.validation.TopologyValidator;
+import org.apache.knox.gateway.util.XmlUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+import java.io.*;
+import java.util.*;
+
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorHandlerTest {
+
+    private static final String TEST_PROVIDER_CONFIG =
+            "    <gateway>\n" +
+                    "        <provider>\n" +
+                    "            <role>authentication</role>\n" +
+                    "            <name>ShiroProvider</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param>\n" +
+                    "                <!-- \n" +
+                    "                session timeout in minutes,  this is really idle timeout,\n" +
+                    "                defaults to 30mins, if the property value is not defined,, \n" +
+                    "                current client authentication would expire if client idles contiuosly for more than this value\n" +
+                    "                -->\n" +
+                    "                <name>sessionTimeout</name>\n" +
+                    "                <value>30</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm</name>\n" +
+                    "                <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapContextFactory</name>\n" +
+                    "                <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory</name>\n" +
+                    "                <value>$ldapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+                    "                <value>ldap://localhost:33389</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+                    "                <value>simple</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>urls./**</name>\n" +
+                    "                <value>authcBasic</value>\n" +
+                    "            </param>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <provider>\n" +
+                    "            <role>identity-assertion</role>\n" +
+                    "            <name>Default</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <!--\n" +
+                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+                    "        For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
+                    "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
+                    "        in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
+                    "\n" +
+                    "        If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
+                    "        enabled parameter as false.\n" +
+                    "\n" +
+                    "        The name parameter specifies the external host names in a comma separated list.\n" +
+                    "        The value parameter specifies corresponding internal host names in a comma separated list.\n" +
+                    "\n" +
+                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
+                    "        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
+                    "        -->\n" +
+                    "        <provider>\n" +
+                    "            <role>hostmap</role>\n" +
+                    "            <name>static</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+                    "        </provider>\n" +
+                    "    </gateway>\n";
+
+
+    /**
+     * KNOX-1006
+     *
+     * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+     *             org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+     */
+    @Test
+    public void testSimpleDescriptorHandler() throws Exception {
+
+        final String type = "DUMMY";
+        final String address = "http://c6401.ambari.apache.org:8080";
+        final String clusterName = "dummy";
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("JOBTRACKER", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("WEBHCAT", null);
+        serviceURLs.put("OOZIE", null);
+        serviceURLs.put("WEBHBASE", null);
+        serviceURLs.put("HIVE", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        // Write the externalized provider config to a temp file
+        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = (new File(".")).getCanonicalFile();
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                           SimpleDescriptorHandler.handle(testDescriptor,
+                                                          providerConfig.getParentFile(), // simple desc co-located with provider config
+                                                          destDir);
+            topologyFile = files.get("topology");
+
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // Validate the provider configuration
+            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            assertTrue("Resulting provider config should be identical to the referenced content.",
+                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+            // Validate the service declarations
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                        (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+                    assertNotNull("Every declared service should have a URL.", url);
+                    if (!topologyServiceURLs.containsKey(role)) {
+                        topologyServiceURLs.put(role, new ArrayList<String>());
+                    }
+                    topologyServiceURLs.get(role).add(url);
+                }
+            }
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            providerConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
+    private File writeProviderConfig(String path, String content) throws IOException {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+}
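
Based on the XPath assertions above, the topology file the handler produces has
roughly this shape (abbreviated; per the isEqualNode assertion, the gateway element
is the referenced provider configuration verbatim):

    <topology>
        <gateway>
            <!-- contents of ambari-cluster-policy.xml -->
        </gateway>
        <service>
            <role>AMBARIUI</role>
            <url>http://c6401.ambari.apache.org:8080</url>
        </service>
        <!-- one service element per service declared in the descriptor -->
    </topology>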

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
deleted file mode 100644
index 82a6f86..0000000
--- a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
+++ /dev/null
@@ -1,21 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType
-org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType
-org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType b/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..79b4995
--- /dev/null
+++ b/gateway-server/src/test/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,21 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType
+org.apache.knox.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType
+org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
deleted file mode 100644
index 8223bea..0000000
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
+++ /dev/null
@@ -1,74 +0,0 @@
-<gateway>
-    <provider>
-        <role>authentication</role>
-        <name>ShiroProvider</name>
-        <enabled>true</enabled>
-        <param>
-            <!--
-            session timeout in minutes; this is really an idle timeout.
-            Defaults to 30 minutes if the property value is not defined;
-            the current client authentication will expire if the client idles continuously for longer than this value
-            -->
-            <name>sessionTimeout</name>
-            <value>30</value>
-        </param>
-        <param>
-            <name>main.ldapRealm</name>
-            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
-        </param>
-        <param>
-            <name>main.ldapContextFactory</name>
-            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
-        </param>
-        <param>
-            <name>main.ldapRealm.contextFactory</name>
-            <value>$ldapContextFactory</value>
-        </param>
-        <param>
-            <name>main.ldapRealm.userDnTemplate</name>
-            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
-        </param>
-        <param>
-            <name>main.ldapRealm.contextFactory.url</name>
-            <value>ldap://localhost:33389</value>
-        </param>
-        <param>
-            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
-            <value>simple</value>
-        </param>
-        <param>
-            <name>urls./**</name>
-            <value>authcBasic</value>
-        </param>
-    </provider>
-
-    <provider>
-        <role>identity-assertion</role>
-        <name>Default</name>
-        <enabled>true</enabled>
-    </provider>
-
-    <!--
-    Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
-    For example, a Hadoop service running in AWS may return a response that includes URLs containing
-    some AWS-internal host name.  If the client needs to make a subsequent request to the host identified
-    in those URLs, those names must be mapped to external host names that the client can use to connect through Knox.
-
-    If the external and internal host names are the same, turn off this provider by setting the value of
-    the enabled parameter to false.
-
-    The name parameter specifies the external host names in a comma-separated list.
-    The value parameter specifies the corresponding internal host names in a comma-separated list.
-
-    Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
-    out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
-    Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
-    -->
-    <provider>
-        <role>hostmap</role>
-        <name>static</name>
-        <enabled>true</enabled>
-        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
-    </provider>
-
-</gateway>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
deleted file mode 100644
index 45407a7..0000000
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "discovery-type":"DUMMY",
-  "discovery-address":"http://c6401.ambari.apache.org:8080",
-  "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
-  "cluster":"dummy",
-  "services":[
-    {"name":"NAMENODE"},
-    {"name":"JOBTRACKER"},
-    {"name":"WEBHDFS"},
-    {"name":"WEBHCAT"},
-    {"name":"OOZIE"},
-    {"name":"WEBHBASE"},
-    {"name":"HIVE"},
-    {"name":"RESOURCEMANAGER"},
-    {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
-    {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
new file mode 100644
index 0000000..8223bea
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
@@ -0,0 +1,74 @@
+<gateway>
+    <provider>
+        <role>authentication</role>
+        <name>ShiroProvider</name>
+        <enabled>true</enabled>
+        <param>
+            <!--
+            session timeout in minutes; this is really an idle timeout.
+            Defaults to 30 minutes if the property value is not defined;
+            the current client authentication will expire if the client idles continuously for longer than this value
+            -->
+            <name>sessionTimeout</name>
+            <value>30</value>
+        </param>
+        <param>
+            <name>main.ldapRealm</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+        </param>
+        <param>
+            <name>main.ldapContextFactory</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory</name>
+            <value>$ldapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.userDnTemplate</name>
+            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.url</name>
+            <value>ldap://localhost:33389</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+            <value>simple</value>
+        </param>
+        <param>
+            <name>urls./**</name>
+            <value>authcBasic</value>
+        </param>
+    </provider>
+
+    <provider>
+        <role>identity-assertion</role>
+        <name>Default</name>
+        <enabled>true</enabled>
+    </provider>
+
+    <!--
+    Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+    For example, a Hadoop service running in AWS may return a response that includes URLs containing
+    some AWS-internal host name.  If the client needs to make a subsequent request to the host identified
+    in those URLs, those names must be mapped to external host names that the client can use to connect through Knox.
+
+    If the external and internal host names are the same, turn off this provider by setting the value of
+    the enabled parameter to false.
+
+    The name parameter specifies the external host names in a comma-separated list.
+    The value parameter specifies the corresponding internal host names in a comma-separated list.
+
+    Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+    out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
+    Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
+    -->
+    <provider>
+        <role>hostmap</role>
+        <name>static</name>
+        <enabled>true</enabled>
+        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+    </provider>
+
+</gateway>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-topology-four.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-topology-four.json b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-topology-four.json
new file mode 100644
index 0000000..45407a7
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-topology-four.json
@@ -0,0 +1,18 @@
+{
+  "discovery-type":"DUMMY",
+  "discovery-address":"http://c6401.ambari.apache.org:8080",
+  "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
+  "cluster":"dummy",
+  "services":[
+    {"name":"NAMENODE"},
+    {"name":"JOBTRACKER"},
+    {"name":"WEBHDFS"},
+    {"name":"WEBHCAT"},
+    {"name":"OOZIE"},
+    {"name":"WEBHBASE"},
+    {"name":"HIVE"},
+    {"name":"RESOURCEMANAGER"},
+    {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
+    {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
deleted file mode 100644
index 554ddbe..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-import java.lang.annotation.*;
-
-/**
- * This annotation can be used to inject gateway services into a ServiceDiscovery implementation.
- */
-@Documented
-@Target(ElementType.FIELD)
-@Retention(RetentionPolicy.RUNTIME)
-public @interface GatewayService {
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
deleted file mode 100644
index eefa30b..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-import java.util.List;
-import java.util.Map;
-
-
-/**
- * Implementations provide the means by which Hadoop service endpoint URLs are discovered from a source with knowledge
- * about the service topology of one or more clusters.
- */
-public interface ServiceDiscovery {
-
-    /**
-     * This is the type specified in a simple descriptor to indicate which ServiceDiscovery implementation to employ.
-     *
-     * @return The identifier for the service discovery type.
-     */
-    String getType();
-
-
-    /**
-     * Discover details of all the clusters known to the target registry.
-     *
-     * @param config The configuration for the discovery invocation
-     *
-     * @return A Map of the discovered service data, keyed by the cluster name.
-     */
-    Map<String, Cluster> discover(ServiceDiscoveryConfig config);
-
-
-    /**
-     * Discover details for a single cluster.
-     *
-     * @param config The configuration for the discovery invocation
-     * @param clusterName The name of a particular cluster
-     *
-     * @return The discovered service data for the specified cluster
-     */
-    Cluster discover(ServiceDiscoveryConfig config, String clusterName);
-
-
-    /**
-     * A handle to the service discovery result.
-     */
-    interface Cluster {
-
-        /**
-         * @return The name of the cluster
-         */
-        String getName();
-
-        /**
-         * @param serviceName The name of the service
-         * @return The URLs for the specified service in this cluster.
-         */
-        List<String> getServiceURLs(String serviceName);
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
deleted file mode 100644
index 6b2e741..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-/**
- * ServiceDiscovery configuration details.
- */
-public interface ServiceDiscoveryConfig {
-
-    /**
-     *
-     * @return The address of the discovery source.
-     */
-    String getAddress();
-
-    /**
-     *
-     * @return The username configured for interactions with the discovery source.
-     */
-    String getUser();
-
-    /**
-     *
-     * @return The alias for the password required for interactions with the discovery source.
-     */
-    String getPasswordAlias();
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
deleted file mode 100644
index cddced1..0000000
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-/**
- * ServiceDiscovery extensions must implement this interface to add support for a new discovery source.
- *
- * The ServiceLoader mechanism is used to include ServiceDiscovery extensions, and implementations of this interface
- * are the providers.
- */
-public interface ServiceDiscoveryType {
-
-    /**
-     *
-     * @return The identifier for the discovery type.
-     */
-    String getType();
-
-
-    /**
-     *
-     * @return A new instance of the ServiceDiscovery implementation provided by this type.
-     */
-    ServiceDiscovery newInstance();
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/GatewayService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/GatewayService.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/GatewayService.java
new file mode 100644
index 0000000..f19a76b
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/GatewayService.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+import java.lang.annotation.*;
+
+/**
+ * This annotation can be used to inject gateway services into a ServiceDiscovery implementation.
+ */
+@Documented
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface GatewayService {
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscovery.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscovery.java
new file mode 100644
index 0000000..03effdd
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscovery.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * Implementations provide the means by which Hadoop service endpoint URLs are discovered from a source with knowledge
+ * about the service topology of one or more clusters.
+ */
+public interface ServiceDiscovery {
+
+    /**
+     * This is the type specified in a simple descriptor to indicate which ServiceDiscovery implementation to employ.
+     *
+     * @return The identifier for the service discovery type.
+     */
+    String getType();
+
+
+    /**
+     * Discover details of all the clusters known to the target registry.
+     *
+     * @param config The configuration for the discovery invocation
+     *
+     * @return A Map of the discovered service data, keyed by the cluster name.
+     */
+    Map<String, Cluster> discover(ServiceDiscoveryConfig config);
+
+
+    /**
+     * Discover details for a single cluster.
+     *
+     * @param config The configuration for the discovery invocation
+     * @param clusterName The name of a particular cluster
+     *
+     * @return The discovered service data for the specified cluster
+     */
+    Cluster discover(ServiceDiscoveryConfig config, String clusterName);
+
+
+    /**
+     * A handle to the service discovery result.
+     */
+    interface Cluster {
+
+        /**
+         * @return The name of the cluster
+         */
+        String getName();
+
+        /**
+         * @param serviceName The name of the service
+         * @return The URLs for the specified service in this cluster.
+         */
+        List<String> getServiceURLs(String serviceName);
+    }
+
+
+}
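
A brief sketch of how a caller might consume this contract once an implementation is in hand; the cluster name "Sandbox" and the choice of "WEBHDFS" are illustrative only:

    // Given a ServiceDiscovery instance and its configuration, discover one
    // cluster and print the URLs for a service.
    void printWebHdfsUrls(ServiceDiscovery sd, ServiceDiscoveryConfig config) {
        ServiceDiscovery.Cluster cluster = sd.discover(config, "Sandbox");
        if (cluster != null) {
            for (String url : cluster.getServiceURLs("WEBHDFS")) {
                System.out.println(cluster.getName() + " WEBHDFS -> " + url);
            }
        }
    }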

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryConfig.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryConfig.java
new file mode 100644
index 0000000..718c984
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryConfig.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery configuration details.
+ */
+public interface ServiceDiscoveryConfig {
+
+    /**
+     *
+     * @return The address of the discovery source.
+     */
+    String getAddress();
+
+    /**
+     *
+     * @return The username configured for interactions with the discovery source.
+     */
+    String getUser();
+
+    /**
+     *
+     * @return The alias for the password required for interactions with the discovery source.
+     */
+    String getPasswordAlias();
+
+}
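
A minimal sketch of an implementation of this interface (the commit's actual DefaultServiceDiscoveryConfig in gateway-server is not shown here):

    public class SimpleDiscoveryConfig implements ServiceDiscoveryConfig {
        private final String address;
        private final String user;
        private final String passwordAlias;

        public SimpleDiscoveryConfig(String address, String user, String passwordAlias) {
            this.address = address;
            this.user = user;
            this.passwordAlias = passwordAlias;
        }

        @Override public String getAddress()       { return address; }
        @Override public String getUser()          { return user; }
        @Override public String getPasswordAlias() { return passwordAlias; }
    }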

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryType.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryType.java
new file mode 100644
index 0000000..5cf96a9
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryType.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery extensions must implement this interface to add support for a new discovery source.
+ *
+ * The ServiceLoader mechanism is used to include ServiceDiscovery extensions, and implementations of this interface
+ * are the providers.
+ */
+public interface ServiceDiscoveryType {
+
+    /**
+     *
+     * @return The identifier for the discovery type.
+     */
+    String getType();
+
+
+    /**
+     *
+     * @return A new instance of the ServiceDiscovery implementation provided by this type.
+     */
+    ServiceDiscovery newInstance();
+
+}
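
A sketch of the ServiceLoader lookup this contract implies; the actual selection logic lives in ServiceDiscoveryFactory (see the file listing in the commit summary below), so this helper is illustrative only:

    import java.util.ServiceLoader;

    // Locate a ServiceDiscovery implementation by its type identifier by
    // iterating the registered ServiceDiscoveryType providers.
    static ServiceDiscovery locate(String type) {
        for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
            if (sdt.getType().equalsIgnoreCase(type)) {
                return sdt.newInstance();
            }
        }
        return null;
    }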


[10/22] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by mo...@apache.org.
KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c2ca4432
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c2ca4432
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c2ca4432

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: c2ca443262a848ae0d56e03e92ecba32fbf149f2
Parents: 3a0119b
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sat Sep 23 11:04:39 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sat Sep 23 11:04:39 2017 -0400

----------------------------------------------------------------------
 b/gateway-discovery-ambari/pom.xml              |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 b/gateway-release/home/conf/descriptors/README  |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-discovery-ambari/pom.xml                |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 gateway-release/home/conf/descriptors/README    |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-release/pom.xml                         |   4 +
 gateway-server/pom.xml                          |   5 +
 .../apache/hadoop/gateway/GatewayMessages.java  |   9 +-
 .../services/DefaultGatewayServices.java        |   3 +-
 .../topology/impl/DefaultTopologyService.java   | 278 +++++-
 .../builder/BeanPropertyTopologyBuilder.java    |   2 +-
 .../DefaultServiceDiscoveryConfig.java          |  48 ++
 .../discovery/ServiceDiscoveryFactory.java      |  81 ++
 .../topology/simple/SimpleDescriptor.java       |  46 +
 .../simple/SimpleDescriptorFactory.java         |  71 ++
 .../simple/SimpleDescriptorHandler.java         | 186 ++++
 .../topology/simple/SimpleDescriptorImpl.java   | 111 +++
 .../simple/SimpleDescriptorMessages.java        |  44 +
 .../topology/DefaultTopologyServiceTest.java    |  70 +-
 .../PropertiesFileServiceDiscoveryTest.java     |  90 ++
 .../discovery/ServiceDiscoveryFactoryTest.java  |  81 ++
 .../test/extension/DummyServiceDiscovery.java   |  66 ++
 .../extension/DummyServiceDiscoveryType.java    |  32 +
 .../PropertiesFileServiceDiscovery.java         | 108 +++
 .../PropertiesFileServiceDiscoveryType.java     |  35 +
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 +
 .../extension/SneakyServiceDiscoveryType.java   |  33 +
 .../simple/SimpleDescriptorFactoryTest.java     | 218 +++++
 .../simple/SimpleDescriptorHandlerTest.java     | 239 ++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  21 +
 .../topology/file/ambari-cluster-policy.xml     |  74 ++
 .../topology/file/simple-topology-four.json     |  18 +
 .../topology/discovery/GatewayService.java      |  29 +
 .../topology/discovery/ServiceDiscovery.java    |  76 ++
 .../discovery/ServiceDiscoveryConfig.java       |  42 +
 .../discovery/ServiceDiscoveryType.java         |  40 +
 pom.xml                                         |  27 +-
 54 files changed, 5626 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/pom.xml b/b/gateway-discovery-ambari/pom.xml
new file mode 100644
index 0000000..924e89c
--- /dev/null
+++ b/b/gateway-discovery-ambari/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.14.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-discovery-ambari</artifactId>
+
+    <name>gateway-discovery-ambari</name>
+    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..6eaabd3
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.keySet().contains(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}
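
A sketch, in the same package (AmbariCluster is package-private), of how the discovery code populates and queries this model; the host and port values are illustrative:

    AmbariCluster cluster = new AmbariCluster("Sandbox"); // hypothetical cluster name
    Map<String, String> props = new HashMap<>();
    props.put("dfs.namenode.http-address", "nn-host.example.com:50070");
    cluster.addServiceConfiguration("HDFS", "hdfs-site",
            new AmbariCluster.ServiceConfiguration("hdfs-site", "1", props));
    // getServiceURLs() delegates to AmbariServiceURLCreator.create(this, "WEBHDFS"),
    // which reads dfs.namenode.http-address and appends /webhdfs, yielding
    // [http://nn-host.example.com:50070/webhdfs]
    List<String> urls = cluster.getServiceURLs("WEBHDFS");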

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..55257fb
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..34f20a7
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which the endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // Parse the cluster names from the response, and perform the cluster discovery
+        JSONArray clusterItems = (JSONArray) json.get("items");
+        for (Object clusterItem : clusterItems) {
+            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+            try {
+                Cluster c = discover(config, clusterName);
+                clusters.put(clusterName, c);
+            } catch (Exception e) {
+                log.clusterDiscoveryError(clusterName, e);
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+//                    String hostName = (String) hostRoles.get("host_name");
+                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<String, String>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null) {
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                AmbariComponent c = new AmbariComponent(componentName,
+                                                        svcConfig.getVersion(),
+                                                        clusterName,
+                                                        serviceName,
+                                                        hostNames,
+                                                        svcConfig.getProperties());
+                cluster.addComponent(c);
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no configured username, then use default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
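
A sketch of the JSON handling pattern used by discover() above. The inline JSON literal only illustrates the response shape the code expects (items[].Clusters.cluster_name); it is not captured Ambari output:

    String body = "{\"items\":[{\"Clusters\":{\"cluster_name\":\"Sandbox\"}}]}";
    JSONObject json = (JSONObject) JSONValue.parse(body);
    JSONArray items = (JSONArray) json.get("items");
    for (Object item : items) {
        JSONObject clusters = (JSONObject) ((JSONObject) item).get("Clusters");
        System.out.println(clusters.get("cluster_name")); // Sandbox
    }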

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..caa16ed
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..723a786
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..0674642
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return One or more endpoint URLs for the specified service.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            String scheme = null; // What is the scheme for the binary transport mode?
+            if ("http".equals(transport)) {
+                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
+            }
+
+            result.add(scheme + "://" + host + ":" + port + "/" + path);
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
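
A usage sketch for the URL creator; the property values named in the comments are illustrative, mirroring the lookups in createResourceManagerURL() above:

    // Assumes the cluster's RESOURCEMANAGER component carries
    // yarn.resourcemanager.webapp.address = rm-host.example.com:8088 and a
    // yarn.http.policy other than HTTPS_ONLY.
    AmbariServiceURLCreator creator = new AmbariServiceURLCreator();
    List<String> urls = creator.create(cluster, "RESOURCEMANAGER");
    // urls would then contain: http://rm-host.example.com:8088/ws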

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..1da4fc9
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
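
For reference, this provider-configuration file is what java.util.ServiceLoader reads. A hypothetical third-party extension would register its own type the same way, shipping a file at the identical META-INF/services path in its jar (the com.example name below is a placeholder):

    # META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
    # One fully-qualified implementation class name per line:
    com.example.discovery.MyServiceDiscoveryType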


[06/22] knox git commit: KNOX-1061 - KnoxSSO Redirects with Query Params in the OriginalUrl Broken

Posted by mo...@apache.org.
KNOX-1061 - KnoxSSO Redirects with Query Params in the OriginalUrl Broken

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/3a0119b2
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/3a0119b2
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/3a0119b2

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 3a0119b217bb71d107b335c27abac77847b2bfe4
Parents: 5432c87
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Sep 22 17:30:13 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Sep 22 17:30:13 2017 -0400

----------------------------------------------------------------------
 ...entityAsserterHttpServletRequestWrapper.java | 25 +++++++++++++-------
 .../apache/hadoop/gateway/util/HttpUtils.java   |  7 +++---
 2 files changed, 20 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/3a0119b2/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
index 961fef7..dfce6cd 100644
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
@@ -71,21 +71,30 @@ private static SpiGatewayMessages log = MessagesFactory.get( SpiGatewayMessages.
     return super.getParameter(name);
   }
   
-  @SuppressWarnings("rawtypes")
   @Override
-  public Map getParameterMap() {
-    Map map = null;
+  public Map<String, String[]> getParameterMap() {
+    Map<String, String[]> map = null;
     try {
-      map = getParams();
+      map = convertValuesToStringArrays(getParams());
     } catch (UnsupportedEncodingException e) {
       log.unableToGetParamsFromQueryString(e);
     }
     return map;
   }
 
-  @SuppressWarnings({ "unchecked", "rawtypes" })
+  private Map<String, String[]> convertValuesToStringArrays(Map<String, List<String>> params) {
+    Map<String, String[]> arrayMap = new HashMap<String, String[]>();
+    String name = null;
+    Enumeration<String> names = getParameterNames();
+    while (names.hasMoreElements()) {
+      name = names.nextElement();
+      arrayMap.put(name, getParameterValues(name));
+    }
+    return arrayMap;
+  }
+
   @Override
-  public Enumeration getParameterNames() {
+  public Enumeration<String> getParameterNames() {
     Enumeration<String> e = null;
     Map<String, List<String>> params;
     try {
@@ -103,14 +112,14 @@ private static SpiGatewayMessages log = MessagesFactory.get( SpiGatewayMessages.
 
   @Override
   public String[] getParameterValues(String name) {
-    String[] p = null;
+    String[] p = {};
     Map<String, List<String>> params;
     try {
       params = getParams();
       if (params == null) {
         params = new HashMap<>();
       }
-      p = (String[]) params.get(name).toArray();
+      p = (String[]) params.get(name).toArray(p);
     } catch (UnsupportedEncodingException e) {
       log.unableToGetParamsFromQueryString(e);
     }
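
The getParameterValues() change above fixes a latent ClassCastException: List.toArray() with no arguments returns Object[], which cannot be cast to String[]. A minimal demonstration:

    List<String> values = Arrays.asList("a", "b");
    // Throws ClassCastException at runtime, since toArray() returns Object[]:
    //   String[] broken = (String[]) values.toArray();
    // Passing a typed array, as the fix above does, yields a real String[]:
    String[] ok = values.toArray(new String[0]);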

http://git-wip-us.apache.org/repos/asf/knox/blob/3a0119b2/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
index 7f5e96a..2b2d0eb 100644
--- a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
+++ b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
@@ -20,10 +20,9 @@ package org.apache.hadoop.gateway.util;
 import java.io.UnsupportedEncodingException;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
@@ -32,7 +31,7 @@ public class HttpUtils {
 
   public static Map<String, List<String>> splitQuery(String queryString)
       throws UnsupportedEncodingException {
-    final Map<String, List<String>> queryPairs = new LinkedHashMap<String, List<String>>();
+    final Map<String, List<String>> queryPairs = new HashMap<String, List<String>>();
     if (queryString == null || queryString.trim().isEmpty()) {
       return queryPairs;
     }
@@ -41,7 +40,7 @@ public class HttpUtils {
       final int idx = pair.indexOf("=");
       final String key = idx > 0 ? URLDecoder.decode(pair.substring(0, idx), "UTF-8") : pair;
       if (!queryPairs.containsKey(key)) {
-        queryPairs.put(key, new LinkedList<String>());
+        queryPairs.put(key, new ArrayList<String>());
       }
       final String value = idx > 0 && pair.length() > idx + 1 
           ? URLDecoder.decode(pair.substring(idx + 1), "UTF-8") : "";


[20/22] knox git commit: KNOX-998 - Merging from current master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..ec8aed2
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        static final String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            cannedResponses.put(AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}
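
Beyond the component-count assertions above, the canned templates also support URL-level
checks. A hedged sketch of such a test (it assumes the discovery implementation derives the
RESOURCEMANAGER URL from the yarn.resourcemanager.webapp.address property in the canned
yarn-site template; everything else reuses classes shown in this file):

    @Test
    public void testResourceManagerURLDiscovery() throws Exception {
        ServiceDiscovery sd = new TestAmbariServiceDiscovery("testCluster");
        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
        EasyMock.expect(sdc.getAddress()).andReturn("http://ambarihost:8080").anyTimes();
        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
        EasyMock.replay(sdc);

        ServiceDiscovery.Cluster cluster = sd.discover(sdc, "testCluster");
        List<String> rmURLs = cluster.getServiceURLs("RESOURCEMANAGER");
        assertNotNull(rmURLs);
        // The canned yarn-site binds the RM web app to c6402.ambari.apache.org:8088.
        assertTrue(rmURLs.get(0).contains("c6402.ambari.apache.org:8088"));
    }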

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
index 790a7a5..361a1ff 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -576,7 +576,7 @@ public abstract class AbstractJWTFilterTest  {
     }
 
     /* (non-Javadoc)
-     * @see org.apache.knox.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.hadoop.gateway.services.security.token.impl.JWT)
+     * @see org.apache.knox.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.knox.gateway.services.security.token.impl.JWT)
      */
     @Override
     public boolean verifyToken(JWT token) throws TokenServiceException {

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
deleted file mode 100644
index 6534b5e..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-public class DefaultServiceDiscoveryConfig implements ServiceDiscoveryConfig {
-    private String address  = null;
-    private String user     = null;
-    private String pwdAlias = null;
-
-    public DefaultServiceDiscoveryConfig(String address) {
-        this.address = address;
-    }
-
-    public void setUser(String username) {
-        this.user = username;
-    }
-
-    public void setPasswordAlias(String alias) {
-        this.pwdAlias = alias;
-    }
-
-    public String getAddress() {
-        return address;
-    }
-
-    public String getUser() {
-        return user;
-    }
-
-    public String getPasswordAlias() {
-        return pwdAlias;
-    }
-
-}
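
This removed holder was the plain way to hand discovery settings to a ServiceDiscovery
instance. A short usage sketch (the address, user, alias, and "AMBARI" type values are
illustrative):

    DefaultServiceDiscoveryConfig config =
        new DefaultServiceDiscoveryConfig("http://ambarihost:8080");
    config.setUser("discovery-user");
    config.setPasswordAlias("ambari.discovery.password");
    // discover(config) returns every cluster visible at the configured address.
    Map<String, ServiceDiscovery.Cluster> clusters =
        ServiceDiscoveryFactory.get("AMBARI").discover(config);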

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
deleted file mode 100644
index 70d5f61..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-import org.apache.hadoop.gateway.services.Service;
-
-import java.lang.reflect.Field;
-import java.util.ServiceLoader;
-
-/**
- * Creates instances of ServiceDiscovery implementations.
- *
- * This factory uses the ServiceLoader mechanism to load ServiceDiscovery implementations as extensions.
- *
- */
-public abstract class ServiceDiscoveryFactory {
-
-    private static final Service[] NO_GATEWAY_SERVICS = new Service[]{};
-
-
-    public static ServiceDiscovery get(String type) {
-        return get(type, NO_GATEWAY_SERVICS);
-    }
-
-
-    public static ServiceDiscovery get(String type, Service...gatewayServices) {
-        ServiceDiscovery sd  = null;
-
-        // Look up the available ServiceDiscovery types
-        ServiceLoader<ServiceDiscoveryType> loader = ServiceLoader.load(ServiceDiscoveryType.class);
-        for (ServiceDiscoveryType sdt : loader) {
-            if (sdt.getType().equalsIgnoreCase(type)) {
-                try {
-                    ServiceDiscovery instance = sdt.newInstance();
-                    // Make sure the type reported by the instance matches the type declared by the factory
-                    // (is this necessary?)
-                    if (instance.getType().equalsIgnoreCase(type)) {
-                        sd = instance;
-
-                        // Inject any gateway services that were specified, and which are referenced in the impl
-                        if (gatewayServices != null && gatewayServices.length > 0) {
-                            for (Field field : sd.getClass().getDeclaredFields()) {
-                                if (field.getDeclaredAnnotation(GatewayService.class) != null) {
-                                    for (Service s : gatewayServices) {
-                                        if (s != null) {
-                                            if (field.getType().isAssignableFrom(s.getClass())) {
-                                                field.setAccessible(true);
-                                                field.set(sd, s);
-                                            }
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                        break;
-                    }
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-
-        return sd;
-    }
-
-
-}
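
The factory above relies on java.util.ServiceLoader, so extensions register themselves
through the standard provider-configuration file. A minimal sketch of what an extension
module would supply (the Ambari class names and the "AMBARI" type string are illustrative):

    // src/main/resources/META-INF/services/
    //     org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
    // containing the single line:
    //     org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType

    public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
        @Override
        public String getType() {
            return "AMBARI"; // matched case-insensitively by ServiceDiscoveryFactory.get(type)
        }

        @Override
        public ServiceDiscovery newInstance() {
            return new AmbariServiceDiscovery();
        }
    }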

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
deleted file mode 100644
index aa28469..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import java.util.List;
-
-public interface SimpleDescriptor {
-
-    String getName();
-
-    String getDiscoveryType();
-
-    String getDiscoveryAddress();
-
-    String getDiscoveryUser();
-
-    String getDiscoveryPasswordAlias();
-
-    String getClusterName();
-
-    String getProviderConfig();
-
-    List<Service> getServices();
-
-
-    interface Service {
-        String getName();
-
-        List<String> getURLs();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
deleted file mode 100644
index 3df6d2f..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
-import org.apache.commons.io.FilenameUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-
-public class SimpleDescriptorFactory {
-
-    /**
-     * Create a SimpleDescriptor from the specified file.
-     *
-     * @param path The path to the file.
-     * @return A SimpleDescriptor based on the contents of the file.
-     *
-     * @throws IOException
-     */
-    public static SimpleDescriptor parse(String path) throws IOException {
-        SimpleDescriptor sd;
-
-        if (path.endsWith(".json")) {
-            sd = parseJSON(path);
-        } else if (path.endsWith(".yml")) {
-            sd = parseYAML(path);
-        } else {
-           throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
-        }
-
-        return sd;
-    }
-
-
-    static SimpleDescriptor parseJSON(String path) throws IOException {
-        final ObjectMapper mapper = new ObjectMapper();
-        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
-        if (sd != null) {
-            sd.setName(FilenameUtils.getBaseName(path));
-        }
-        return sd;
-    }
-
-
-    static SimpleDescriptor parseYAML(String path) throws IOException {
-        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
-        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
-        if (sd != null) {
-            sd.setName(FilenameUtils.getBaseName(path));
-        }
-        return sd;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
deleted file mode 100644
index fb563fa..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.Service;
-import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
-
-import java.io.*;
-import java.util.*;
-
-
-/**
- * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
- * gateway.
- */
-public class SimpleDescriptorHandler {
-
-    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
-
-    private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
-
-    public static Map<String, File> handle(File desc) throws IOException {
-        return handle(desc, NO_GATEWAY_SERVICES);
-    }
-
-    public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException {
-        return handle(desc, desc.getParentFile(), gatewayServices);
-    }
-
-    public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
-        return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
-    }
-
-    public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException {
-        return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
-    }
-
-    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
-        return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
-    }
-
-    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
-        Map<String, File> result = new HashMap<>();
-
-        File topologyDescriptor;
-
-        DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
-        sdc.setUser(desc.getDiscoveryUser());
-        sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices);
-        ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
-
-        Map<String, List<String>> serviceURLs = new HashMap<>();
-
-        if (cluster != null) {
-            for (SimpleDescriptor.Service descService : desc.getServices()) {
-                String serviceName = descService.getName();
-
-                List<String> descServiceURLs = descService.getURLs();
-                if (descServiceURLs == null || descServiceURLs.isEmpty()) {
-                    descServiceURLs = cluster.getServiceURLs(serviceName);
-                }
-
-                // If there is at least one URL associated with the service, then add it to the map
-                if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
-                    serviceURLs.put(serviceName, descServiceURLs);
-                } else {
-                    log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
-                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
-                                                    ". Topology update aborted!");
-                }
-            }
-        } else {
-            log.failedToDiscoverClusterServices(desc.getClusterName());
-        }
-
-        topologyDescriptor = null;
-        File providerConfig = null;
-        try {
-            // Verify that the referenced provider configuration exists before attempting to read it
-            providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
-            if (providerConfig == null) {
-                log.failedToResolveProviderConfigRef(desc.getProviderConfig());
-                throw new IllegalArgumentException("Unresolved provider configuration reference: " +
-                                                   desc.getProviderConfig() + " ; Topology update aborted!");
-            }
-            result.put("reference", providerConfig);
-
-            // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
-
-            String topologyFilename = desc.getName();
-            if (topologyFilename == null) {
-                topologyFilename = desc.getClusterName();
-            }
-            topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
-            FileWriter fw = new FileWriter(topologyDescriptor);
-
-            fw.write("<topology>\n");
-
-            // Copy the externalized provider configuration content into the topology descriptor in-line
-            InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig));
-            char[] buffer = new char[1024];
-            int count;
-            while ((count = policyReader.read(buffer)) > 0) {
-                fw.write(buffer, 0, count);
-            }
-            policyReader.close();
-
-            // Write the service declarations
-            for (String serviceName : serviceURLs.keySet()) {
-                fw.write("    <service>\n");
-                fw.write("        <role>" + serviceName + "</role>\n");
-                for (String url : serviceURLs.get(serviceName)) {
-                    fw.write("        <url>" + url + "</url>\n");
-                }
-                fw.write("    </service>\n");
-            }
-
-            fw.write("</topology>\n");
-
-            fw.flush();
-            fw.close();
-        } catch (IOException e) {
-            log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
-            topologyDescriptor.delete();
-        }
-
-        result.put("topology", topologyDescriptor);
-        return result;
-    }
-
-
-    private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
-        File providerConfig;
-
-        // If the reference includes a path
-        if (reference.contains(File.separator)) {
-            // Check if it's an absolute path
-            providerConfig = new File(reference);
-            if (!providerConfig.exists()) {
-                // If it's not an absolute path, try treating it as a relative path
-                providerConfig = new File(srcDirectory, reference);
-                if (!providerConfig.exists()) {
-                    providerConfig = null;
-                }
-            }
-        } else { // No file path, just a name
-            // Check if it's co-located with the referencing descriptor
-            providerConfig = new File(srcDirectory, reference);
-            if (!providerConfig.exists()) {
-                // Check the shared-providers config location
-                File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
-                if (sharedProvidersDir.exists()) {
-                    providerConfig = new File(sharedProvidersDir, reference);
-                    if (!providerConfig.exists()) {
-                        // Check if it's a valid name without the extension
-                        providerConfig = new File(sharedProvidersDir, reference + ".xml");
-                        if (!providerConfig.exists()) {
-                            providerConfig = null;
-                        }
-                    }
-                }
-            }
-        }
-
-        return providerConfig;
-    }
-
-}
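
For orientation, a minimal sketch of driving the handler above end-to-end; the descriptor path and destination directory are illustrative assumptions, not part of this commit:

    import java.io.File;
    import java.util.Map;

    import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;

    public class SimpleDescriptorHandlerExample {
        public static void main(String[] args) throws Exception {
            // Hypothetical locations; any readable descriptor and writable directory will do.
            File descriptor  = new File("conf/descriptors/sandbox.json");
            File topologyDir = new File("conf/topologies");

            // Discovers service URLs, resolves the provider-config reference, and writes
            // <topologyDir>/<descriptor-name>.xml containing the provider configuration
            // followed by one <service> element per resolved service.
            Map<String, File> result = SimpleDescriptorHandler.handle(descriptor, topologyDir);

            System.out.println("provider config: " + result.get("reference"));
            System.out.println("topology file:   " + result.get("topology"));
        }
    }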

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
deleted file mode 100644
index 32ceba9..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.ArrayList;
-import java.util.List;
-
-class SimpleDescriptorImpl implements SimpleDescriptor {
-
-    @JsonProperty("discovery-type")
-    private String discoveryType;
-
-    @JsonProperty("discovery-address")
-    private String discoveryAddress;
-
-    @JsonProperty("discovery-user")
-    private String discoveryUser;
-
-    @JsonProperty("discovery-pwd-alias")
-    private String discoveryPasswordAlias;
-
-    @JsonProperty("provider-config-ref")
-    private String providerConfig;
-
-    @JsonProperty("cluster")
-    private String cluster;
-
-    @JsonProperty("services")
-    private List<ServiceImpl> services;
-
-    private String name = null;
-
-    void setName(String name) {
-        this.name = name;
-    }
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-    @Override
-    public String getDiscoveryType() {
-        return discoveryType;
-    }
-
-    @Override
-    public String getDiscoveryAddress() {
-        return discoveryAddress;
-    }
-
-    @Override
-    public String getDiscoveryUser() {
-        return discoveryUser;
-    }
-
-    @Override
-    public String getDiscoveryPasswordAlias() {
-        return discoveryPasswordAlias;
-    }
-
-    @Override
-    public String getClusterName() {
-        return cluster;
-    }
-
-    @Override
-    public String getProviderConfig() {
-        return providerConfig;
-    }
-
-    @Override
-    public List<Service> getServices() {
-        List<Service> result = new ArrayList<>();
-        result.addAll(services);
-        return result;
-    }
-
-    public static class ServiceImpl implements Service {
-        private String name;
-        private List<String> urls;
-
-        @Override
-        public String getName() {
-            return name;
-        }
-
-        @Override
-        public List<String> getURLs() {
-            return urls;
-        }
-    }
-
-}
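
Given the Jackson bindings above, a simple descriptor in its JSON form looks roughly like the following; every value here is illustrative, and the same keys apply to the YAML variant:

    {
      "discovery-type":      "AMBARI",
      "discovery-address":   "http://sandbox.example.com:8080",
      "discovery-user":      "maria_dev",
      "discovery-pwd-alias": "ambari.discovery.password",
      "provider-config-ref": "sandbox-providers",
      "cluster":             "Sandbox",
      "services": [
        { "name": "NAMENODE" },
        { "name": "WEBHDFS", "urls": [ "http://sandbox.example.com:50070/webhdfs" ] }
      ]
    }

When a service entry supplies its own urls list, the handler uses those URLs verbatim; otherwise it falls back to the URLs discovered for that service in the named cluster.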

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
deleted file mode 100644
index cf9aa28..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.gateway.topology.simple")
-public interface SimpleDescriptorMessages {
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Service discovery for cluster {0} failed.")
-    void failedToDiscoverClusterServices(final String cluster);
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No URLs were discovered for {0} in the {1} cluster.")
-    void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Failed to resolve the referenced provider configuration {0}.")
-    void failedToResolveProviderConfigRef(final String providerConfigRef);
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Error generating topology {0} from simple descriptor: {1}")
-    void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
-                                                      @StackTrace( level = MessageLevel.DEBUG ) Exception e );
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
new file mode 100644
index 0000000..3e14a1d
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+public class DefaultServiceDiscoveryConfig implements ServiceDiscoveryConfig {
+    private String address  = null;
+    private String user     = null;
+    private String pwdAlias = null;
+
+    public DefaultServiceDiscoveryConfig(String address) {
+        this.address = address;
+    }
+
+    public void setUser(String username) {
+        this.user = username;
+    }
+
+    public void setPasswordAlias(String alias) {
+        this.pwdAlias = alias;
+    }
+
+    public String getAddress() {
+        return address;
+    }
+
+    public String getUser() {
+        return user;
+    }
+
+    public String getPasswordAlias() {
+        return pwdAlias;
+    }
+
+}
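
A short sketch of how this holder is populated from the corresponding descriptor fields (values illustrative), mirroring its use in SimpleDescriptorHandler; the password alias names a credential managed by the gateway, so no cleartext secret appears in the descriptor:

    DefaultServiceDiscoveryConfig sdc =
        new DefaultServiceDiscoveryConfig("http://sandbox.example.com:8080"); // discovery-address
    sdc.setUser("maria_dev");                                                 // discovery-user
    sdc.setPasswordAlias("ambari.discovery.password");                        // discovery-pwd-alias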

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactory.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactory.java
new file mode 100644
index 0000000..a2a727d
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactory.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+import org.apache.knox.gateway.services.Service;
+
+import java.lang.reflect.Field;
+import java.util.ServiceLoader;
+
+/**
+ * Creates instances of ServiceDiscovery implementations.
+ *
+ * This factory uses the ServiceLoader mechanism to load ServiceDiscovery implementations as extensions.
+ *
+ */
+public abstract class ServiceDiscoveryFactory {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+
+    public static ServiceDiscovery get(String type) {
+        return get(type, NO_GATEWAY_SERVICES);
+    }
+
+
+    public static ServiceDiscovery get(String type, Service...gatewayServices) {
+        ServiceDiscovery sd  = null;
+
+        // Look up the available ServiceDiscovery types
+        ServiceLoader<ServiceDiscoveryType> loader = ServiceLoader.load(ServiceDiscoveryType.class);
+        for (ServiceDiscoveryType sdt : loader) {
+            if (sdt.getType().equalsIgnoreCase(type)) {
+                try {
+                    ServiceDiscovery instance = sdt.newInstance();
+                    // Make sure the type reported by the instance matches the type declared by the factory
+                    // (is this necessary?)
+                    if (instance.getType().equalsIgnoreCase(type)) {
+                        sd = instance;
+
+                        // Inject any gateway services that were specified, and which are referenced in the impl
+                        if (gatewayServices != null && gatewayServices.length > 0) {
+                            for (Field field : sd.getClass().getDeclaredFields()) {
+                                if (field.getDeclaredAnnotation(GatewayService.class) != null) {
+                                    for (Service s : gatewayServices) {
+                                        if (s != null) {
+                                            if (field.getType().isAssignableFrom(s.getClass())) {
+                                                field.setAccessible(true);
+                                                field.set(sd, s);
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        break;
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return sd;
+    }
+
+
+}
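
A minimal sketch of an implementation this factory could load; the class name, type string, and AliasService field are assumptions for illustration, and ServiceDiscovery members beyond those exercised in this commit are elided. The factory injects a supplied gateway service into any field annotated with @GatewayService whose declared type that service satisfies:

    import org.apache.knox.gateway.services.security.AliasService;

    public class ExampleServiceDiscovery implements ServiceDiscovery {

        @GatewayService
        private AliasService aliasService; // set reflectively by ServiceDiscoveryFactory

        @Override
        public String getType() {
            return "EXAMPLE"; // must match the type its ServiceDiscoveryType declares
        }

        @Override
        public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
            // A real implementation would contact the cluster manager at
            // config.getAddress() using config.getUser() and config.getPasswordAlias().
            return null;
        }
    }

A companion ServiceDiscoveryType, registered through META-INF/services so that ServiceLoader can find it, reports the same type string and builds the instance in newInstance(); callers then obtain it via ServiceDiscoveryFactory.get("EXAMPLE", aliasService).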

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptor.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptor.java
new file mode 100644
index 0000000..85c0535
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptor.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import java.util.List;
+
+public interface SimpleDescriptor {
+
+    String getName();
+
+    String getDiscoveryType();
+
+    String getDiscoveryAddress();
+
+    String getDiscoveryUser();
+
+    String getDiscoveryPasswordAlias();
+
+    String getClusterName();
+
+    String getProviderConfig();
+
+    List<Service> getServices();
+
+
+    interface Service {
+        String getName();
+
+        List<String> getURLs();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
new file mode 100644
index 0000000..254dca1
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactory.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+
+public class SimpleDescriptorFactory {
+
+    /**
+     * Create a SimpleDescriptor from the specified file.
+     *
+     * @param path The path to the file.
+     * @return A SimpleDescriptor based on the contents of the file.
+     *
+     * @throws IOException if the descriptor file cannot be read or parsed.
+     */
+    public static SimpleDescriptor parse(String path) throws IOException {
+        SimpleDescriptor sd;
+
+        if (path.endsWith(".json")) {
+            sd = parseJSON(path);
+        } else if (path.endsWith(".yml")) {
+            sd = parseYAML(path);
+        } else {
+           throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
+        }
+
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseJSON(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper();
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseYAML(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+}
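
A brief sketch of consuming the factory, assuming a descriptor like the JSON sample shown earlier is saved at the hypothetical path below. Only the .json and .yml extensions are recognized (anything else raises IllegalArgumentException), and the descriptor's name is derived from the file's base name:

    SimpleDescriptor sd = SimpleDescriptorFactory.parse("conf/descriptors/sandbox.json");

    System.out.println(sd.getName());        // "sandbox" (base filename)
    System.out.println(sd.getClusterName()); // "Sandbox"
    for (SimpleDescriptor.Service service : sd.getServices()) {
        System.out.println(service.getName() + " -> " + service.getURLs());
    }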


[03/22] knox git commit: KNOX-1058 - Fix JWTToken.parseToken

Posted by mo...@apache.org.
KNOX-1058 - Fix JWTToken.parseToken


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/935f81fb
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/935f81fb
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/935f81fb

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 935f81fb0f446a18eb09d5c710f679e4012a7cc1
Parents: c833bf9
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 22 10:36:27 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Fri Sep 22 10:36:27 2017 +0100

----------------------------------------------------------------------
 .../services/security/token/impl/JWTToken.java      | 16 +++-------------
 .../services/security/token/impl/JWTTokenTest.java  | 10 +++-------
 2 files changed, 6 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/935f81fb/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index 49d8609..b7b8649 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -40,13 +40,8 @@ public class JWTToken implements JWT {
 
   SignedJWT jwt = null;
 
-  private JWTToken(byte[] header, byte[] claims, byte[] signature) throws ParseException {
-    try {
-      jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")),
-          new Base64URL(new String(signature, "UTF8")));
-    } catch (UnsupportedEncodingException e) {
-      log.unsupportedEncoding(e);
-    }
+  private JWTToken(String header, String claims, String signature) throws ParseException {
+    jwt = new SignedJWT(new Base64URL(header), new Base64URL(claims), new Base64URL(signature));
   }
 
   public JWTToken(String serializedJWT) throws ParseException {
@@ -147,12 +142,7 @@ public class JWTToken implements JWT {
   public static JWTToken parseToken(String wireToken) throws ParseException {
     log.parsingToken(wireToken);
     String[] parts = wireToken.split("\\.");
-    JWTToken jwt = new JWTToken(Base64.decodeBase64(parts[0]), Base64.decodeBase64(parts[1]), Base64.decodeBase64(parts[2]));
-//    System.out.println("header: " + token.header);
-//    System.out.println("claims: " + token.claims);
-//    System.out.println("payload: " + new String(token.payload));
-
-    return jwt;
+    return new JWTToken(parts[0], parts[1], parts[2]);
   }
 
   /* (non-Javadoc)
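
The essence of the fix: the three dot-separated segments of a serialized JWT are already Base64URL-encoded, but the old constructor Base64-decoded them and then wrapped the resulting JSON text in Base64URL, handing SignedJWT plain text where it expects encoded text. The corrected flow passes the segments through untouched, sketched here with the sample token from the test below (SignedJWT's constructor declares ParseException):

    import com.nimbusds.jose.util.Base64URL;
    import com.nimbusds.jwt.SignedJWT;

    String wireToken = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
    String[] parts = wireToken.split("\\.");
    // Each part stays in Base64URL form; SignedJWT decodes internally.
    SignedJWT jwt = new SignedJWT(new Base64URL(parts[0]),
                                  new Base64URL(parts[1]),
                                  new Base64URL(parts[2]));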

http://git-wip-us.apache.org/repos/asf/knox/blob/935f81fb/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
index ef4023d..4ed2ecf 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
@@ -34,8 +34,7 @@ import com.nimbusds.jose.crypto.RSASSAVerifier;
 
 public class JWTTokenTest extends org.junit.Assert {
   private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
-  private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
-  private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
+  private static final String HEADER = "{\"typ\":\"JWT\",\"alg\":\"HS256\"}";
 
   private RSAPublicKey publicKey;
   private RSAPrivateKey privateKey;
@@ -49,15 +48,12 @@ public class JWTTokenTest extends org.junit.Assert {
     privateKey = (RSAPrivateKey) kp.getPrivate();
   }
 
+  @Test
   public void testTokenParsing() throws Exception {
     JWTToken token = JWTToken.parseToken(JWT_TOKEN);
     assertEquals(token.getHeader(), HEADER);
-    assertEquals(token.getClaims(), CLAIMS);
 
-    assertEquals(token.getIssuer(), "gateway");
-    assertEquals(token.getPrincipal(), "john.doe@example.com");
-    assertEquals(token.getAudience(), "https://login.example.com");
-    assertEquals(token.getExpires(), "1363360913");
+    assertEquals(token.getClaim("jti"), "aa7f8d0a95c");
   }
 
   @Test
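
Two things are corrected in this test. First, testTokenParsing previously had no @Test annotation, so its stale assertions never actually ran. Second, the expectations now match what the sample token contains: the first segment decodes to {"alg":"HS256","typ":"JWT"} (Nimbus re-serializes the header with typ first, which the updated HEADER constant reflects), and the payload carries iat, jti, and scopes claims rather than the iss/prn/aud/exp set described by the removed CLAIMS constant. A quick way to confirm, as a sketch:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    String headerJson = new String(
        Base64.getUrlDecoder().decode(JWT_TOKEN.split("\\.")[0]),
        StandardCharsets.UTF_8);
    // -> {"alg":"HS256","typ":"JWT"}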


[14/22] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWTToken.java
index 8b05b4c,0000000..27b1a30
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWTToken.java
@@@ -1,278 -1,0 +1,273 @@@
 +  /**
 +   * Licensed to the Apache Software Foundation (ASF) under one
 +   * or more contributor license agreements.  See the NOTICE file
 +   * distributed with this work for additional information
 +   * regarding copyright ownership.  The ASF licenses this file
 +   * to you under the Apache License, Version 2.0 (the
 +   * "License"); you may not use this file except in compliance
 +   * with the License.  You may obtain a copy of the License at
 +   *
 +   *     http://www.apache.org/licenses/LICENSE-2.0
 +   *
 +   * Unless required by applicable law or agreed to in writing, software
 +   * distributed under the License is distributed on an "AS IS" BASIS,
 +   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 +   * See the License for the specific language governing permissions and
 +   * limitations under the License.
 +   */
 +package org.apache.knox.gateway.services.security.token.impl;
 +
 +import java.io.UnsupportedEncodingException;
 +import java.text.ParseException;
 +import java.util.Date;
 +import java.util.ArrayList;
 +import java.util.List;
++import java.util.Map;
++
 +import org.apache.commons.codec.binary.Base64;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +
 +import com.nimbusds.jose.JOSEException;
 +import com.nimbusds.jose.JWSAlgorithm;
 +import com.nimbusds.jose.JWSHeader;
 +import com.nimbusds.jose.JWSSigner;
 +import com.nimbusds.jose.JWSVerifier;
 +import com.nimbusds.jose.Payload;
 +import com.nimbusds.jose.util.Base64URL;
 +import com.nimbusds.jwt.JWTClaimsSet;
 +import com.nimbusds.jwt.SignedJWT;
 +
 +public class JWTToken implements JWT {
 +  private static JWTProviderMessages log = MessagesFactory.get( JWTProviderMessages.class );
 +
 +  SignedJWT jwt = null;
-   
-   private JWTToken(byte[] header, byte[] claims, byte[] signature) throws ParseException {
-     try {
-       jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")), 
-           new Base64URL(new String(signature, "UTF8")));
-     } catch (UnsupportedEncodingException e) {
-       log.unsupportedEncoding(e);
-     }
++
++  private JWTToken(String header, String claims, String signature) throws ParseException {
++    jwt = new SignedJWT(new Base64URL(header), new Base64URL(claims), new Base64URL(signature));
 +  }
 +
 +  public JWTToken(String serializedJWT) throws ParseException {
 +    try {
 +      jwt = SignedJWT.parse(serializedJWT);
 +    } catch (ParseException e) {
 +      log.unableToParseToken(e);
 +      throw e;
 +    }
 +  }
 +
 +  public JWTToken(String alg, String[] claimsArray) {
 +    this(alg, claimsArray, null);
 +  }
 +
 +  public JWTToken(String alg, String[] claimsArray, List<String> audiences) {
 +    JWSHeader header = new JWSHeader(new JWSAlgorithm(alg));
 +
 +    if (claimsArray[2] != null) {
 +      if (audiences == null) {
 +        audiences = new ArrayList<String>();
 +      }
 +      audiences.add(claimsArray[2]);
 +    }
 +    JWTClaimsSet claims = null;
 +    JWTClaimsSet.Builder builder = new JWTClaimsSet.Builder()
 +    .issuer(claimsArray[0])
 +    .subject(claimsArray[1])
 +    .audience(audiences);
 +    if(claimsArray[3] != null) {
 +      builder = builder.expirationTime(new Date(Long.parseLong(claimsArray[3])));
 +    }
-     
++
 +    claims = builder.build();
 +
 +    jwt = new SignedJWT(header, claims);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getPayloadToSign()
 +   */
 +  @Override
 +  public String getHeader() {
 +    JWSHeader header = jwt.getHeader();
 +    return header.toString();
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getPayloadToSign()
 +   */
 +  @Override
 +  public String getClaims() {
 +    String c = null;
 +    JWTClaimsSet claims = null;
 +    try {
 +      claims = (JWTClaimsSet) jwt.getJWTClaimsSet();
 +      c = claims.toJSONObject().toJSONString();
 +    } catch (ParseException e) {
 +      log.unableToParseToken(e);
 +    }
 +    return c;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getPayloadToSign()
 +   */
 +  @Override
 +  public String getPayload() {
 +    Payload payload = jwt.getPayload();
 +    return payload.toString();
 +  }
 +
 +  public String toString() {
 +    return jwt.serialize();
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#setSignaturePayload(byte[])
 +   */
 +  @Override
 +  public void setSignaturePayload(byte[] payload) {
 +//    this.payload = payload;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getSignaturePayload()
 +   */
 +  @Override
 +  public byte[] getSignaturePayload() {
 +    byte[] b = null;
 +    Base64URL b64 = jwt.getSignature();
 +    if (b64 != null) {
 +      b = b64.decode();
 +    }
 +    return b;
 +  }
 +
 +  public static JWTToken parseToken(String wireToken) throws ParseException {
 +    log.parsingToken(wireToken);
 +    String[] parts = wireToken.split("\\.");
-     JWTToken jwt = new JWTToken(Base64.decodeBase64(parts[0]), Base64.decodeBase64(parts[1]), Base64.decodeBase64(parts[2]));
- //    System.out.println("header: " + token.header);
- //    System.out.println("claims: " + token.claims);
- //    System.out.println("payload: " + new String(token.payload));
-     
-     return jwt;
++    return new JWTToken(parts[0], parts[1], parts[2]);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getClaim(java.lang.String)
 +   */
 +  @Override
 +  public String getClaim(String claimName) {
 +    String claim = null;
-     
++
 +    try {
 +      claim = jwt.getJWTClaimsSet().getStringClaim(claimName);
 +    } catch (ParseException e) {
 +      log.unableToParseToken(e);
 +    }
-     
++
 +    return claim;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getSubject()
 +   */
 +  @Override
 +  public String getSubject() {
 +    return getClaim(JWT.SUBJECT);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getIssuer()
 +   */
 +  @Override
 +  public String getIssuer() {
 +    return getClaim(JWT.ISSUER);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getAudience()
 +   */
 +  @Override
 +  public String getAudience() {
 +    String[] claim = null;
 +    String c = null;
 +
 +    claim = getAudienceClaims();
 +    if (claim != null) {
 +      c = claim[0];
 +    }
 +
 +    return c;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getAudienceClaims()
 +   */
 +  @Override
 +  public String[] getAudienceClaims() {
 +    String[] claims = null;
 +
 +    try {
 +      claims = jwt.getJWTClaimsSet().getStringArrayClaim(JWT.AUDIENCE);
 +    } catch (ParseException e) {
 +      log.unableToParseToken(e);
 +    }
 +
 +    return claims;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getExpires()
 +   */
 +  @Override
 +  public String getExpires() {
-     return getClaim(JWT.EXPIRES);
++    Date expires = getExpiresDate();
++    if (expires != null) {
++      return String.valueOf(expires.getTime());
++    }
++    return null;
 +  }
 +
 +  @Override
 +  public Date getExpiresDate() {
 +    Date date = null;
 +    try {
 +      date = jwt.getJWTClaimsSet().getExpirationTime();
 +    } catch (ParseException e) {
 +      log.unableToParseToken(e);
 +    }
 +    return date;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see JWT#getPrincipal()
 +   */
 +  @Override
 +  public String getPrincipal() {
 +    return getClaim(JWT.PRINCIPAL);
 +  }
 +
-   
++
 +  /* (non-Javadoc)
-    * @see JWT#getPrincipal()
++   * @see org.apache.knox.gateway.services.security.token.impl.JWT#sign(JWSSigner)
 +   */
 +  @Override
 +  public void sign(JWSSigner signer) {
 +    try {
 +      jwt.sign(signer);
 +    } catch (JOSEException e) {
 +      log.unableToSignToken(e);
 +    }
 +  }
 +
-   /**
-    * @param verifier
-    * @return
++  /* (non-Javadoc)
++   * @see org.apache.knox.gateway.services.security.token.impl.JWT#verify(JWSVerifier)
 +   */
 +  public boolean verify(JWSVerifier verifier) {
 +    boolean rc = false;
-     
++
 +    try {
 +      rc = jwt.verify(verifier);
 +    } catch (JOSEException e) {
 +      // TODO Auto-generated catch block
 +      log.unableToVerifyToken(e);
 +    }
-     
++
 +    return rc;
-   }  
++  }
 +}
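
Beyond carrying over the KNOX-1058 parseToken fix, the merged version also reworks getExpires(): rather than reading a string "exp" claim, it derives the value from the JWT's expiration Date and returns epoch milliseconds as a string, or null when no expiration is set. The new testTokenExpiry below exercises exactly this round trip, equivalent to the following sketch (token is any JWTToken built with an expiration, as in the tests):

    java.util.Date expires = token.getExpiresDate();
    String asString = token.getExpires();  // String.valueOf(expires.getTime())
    assert expires.equals(new java.util.Date(Long.parseLong(asString)));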

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
index 3ce28c5,0000000..1b0df9e
mode 100644,000000..100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
@@@ -1,213 -1,0 +1,223 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.security.token.impl;
 +
 +import java.security.KeyPair;
 +import java.security.KeyPairGenerator;
 +import java.security.NoSuchAlgorithmException;
 +import java.security.interfaces.RSAPrivateKey;
 +import java.security.interfaces.RSAPublicKey;
 +import java.util.ArrayList;
++import java.util.Date;
 +
 +import org.junit.Test;
 +
 +import com.nimbusds.jose.JWSAlgorithm;
 +import com.nimbusds.jose.JWSSigner;
 +import com.nimbusds.jose.JWSVerifier;
 +import com.nimbusds.jose.crypto.RSASSASigner;
 +import com.nimbusds.jose.crypto.RSASSAVerifier;
 +
 +public class JWTTokenTest extends org.junit.Assert {
 +  private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
-   private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
-   private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
++  private static final String HEADER = "{\"typ\":\"JWT\",\"alg\":\"HS256\"}";
 +
 +  private RSAPublicKey publicKey;
 +  private RSAPrivateKey privateKey;
 +
 +  public JWTTokenTest() throws Exception, NoSuchAlgorithmException {
 +    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
 +    kpg.initialize(2048);
 +
 +    KeyPair kp = kpg.genKeyPair();
 +    publicKey = (RSAPublicKey) kp.getPublic();
 +    privateKey = (RSAPrivateKey) kp.getPrivate();
 +  }
 +
++  @Test
 +  public void testTokenParsing() throws Exception {
 +    JWTToken token = JWTToken.parseToken(JWT_TOKEN);
 +    assertEquals(token.getHeader(), HEADER);
-     assertEquals(token.getClaims(), CLAIMS);
 +
-     assertEquals(token.getIssuer(), "gateway");
-     assertEquals(token.getPrincipal(), "john.doe@example.com");
-     assertEquals(token.getAudience(), "https://login.example.com");
-     assertEquals(token.getExpires(), "1363360913");
++    assertEquals(token.getClaim("jti"), "aa7f8d0a95c");
 +  }
 +
 +  @Test
 +  public void testTokenCreation() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = "https://login.example.com";
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    JWTToken token = new JWTToken("RS256", claims);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +  }
 +
 +  @Test
 +  public void testTokenCreationWithAudienceListSingle() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = null;
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    ArrayList<String> audiences = new ArrayList<String>();
 +    audiences.add("https://login.example.com");
 +
 +    JWTToken token = new JWTToken("RS256", claims, audiences);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +    assertEquals(1, token.getAudienceClaims().length);
 +  }
 +
 +  @Test
 +  public void testTokenCreationWithAudienceListMultiple() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = null;
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    ArrayList<String> audiences = new ArrayList<String>();
 +    audiences.add("https://login.example.com");
 +    audiences.add("KNOXSSO");
 +
 +    JWTToken token = new JWTToken("RS256", claims, audiences);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +    assertEquals(2, token.getAudienceClaims().length);
 +  }
 +
 +  @Test
 +  public void testTokenCreationWithAudienceListCombined() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = "LJM";
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    ArrayList<String> audiences = new ArrayList<String>();
 +    audiences.add("https://login.example.com");
 +    audiences.add("KNOXSSO");
 +
 +    JWTToken token = new JWTToken("RS256", claims, audiences);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +    assertEquals(3, token.getAudienceClaims().length);
 +  }
 +
 +  @Test
 +  public void testTokenCreationWithNullAudienceList() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = null;
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    ArrayList<String> audiences = null;
 +
 +    JWTToken token = new JWTToken("RS256", claims, audiences);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals(null, token.getAudience());
 +    assertArrayEquals(null, token.getAudienceClaims());
 +  }
 +
 +  @Test
 +  public void testTokenCreationRS512() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = "https://login.example.com";
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
 +  }
 +
 +  @Test
 +  public void testTokenSignature() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = "https://login.example.com";
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    JWTToken token = new JWTToken("RS256", claims);
 +
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +
 +    // Sign the token
 +    JWSSigner signer = new RSASSASigner(privateKey);
 +    token.sign(signer);
 +    assertTrue(token.getSignaturePayload().length > 0);
 +
 +    // Verify the signature
 +    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
 +    assertTrue(token.verify(verifier));
 +  }
 +
 +  @Test
 +  public void testTokenSignatureRS512() throws Exception {
 +    String[] claims = new String[4];
 +    claims[0] = "KNOXSSO";
 +    claims[1] = "john.doe@example.com";
 +    claims[2] = "https://login.example.com";
 +    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
 +    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
 +
 +    assertEquals("KNOXSSO", token.getIssuer());
 +    assertEquals("john.doe@example.com", token.getSubject());
 +    assertEquals("https://login.example.com", token.getAudience());
 +    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
 +
 +    // Sign the token
 +    JWSSigner signer = new RSASSASigner(privateKey);
 +    token.sign(signer);
 +    assertTrue(token.getSignaturePayload().length > 0);
 +
 +    // Verify the signature
 +    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
 +    assertTrue(token.verify(verifier));
 +  }
 +
++  @Test
++  public void testTokenExpiry() throws Exception {
++    String[] claims = new String[4];
++    claims[0] = "KNOXSSO";
++    claims[1] = "john.doe@example.com";
++    claims[2] = "https://login.example.com";
++    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
++    JWTToken token = new JWTToken("RS256", claims);
++
++    assertNotNull(token.getExpires());
++    assertNotNull(token.getExpiresDate());
++    assertEquals(token.getExpiresDate(), new Date(Long.valueOf(token.getExpires())));
++  }
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --cc gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
index e7512b3,0000000..8abf5aa
mode 100644,000000..100644
--- a/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestConfig.java
@@@ -1,612 -1,0 +1,617 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import org.apache.commons.lang.StringUtils;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +
 +import java.io.File;
 +import java.net.InetSocketAddress;
 +import java.net.UnknownHostException;
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.ConcurrentHashMap;
 +
 +public class GatewayTestConfig extends Configuration implements GatewayConfig {
 +
 +  /* Websocket defaults */
 +  public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED = false;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = Integer.MAX_VALUE;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = Integer.MAX_VALUE;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE = 4096;
 +  public static final int DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT = 60000;
 +  public static final int DEFAULT_WEBSOCKET_IDLE_TIMEOUT = 300000;
 +
 +  private String gatewayHomeDir = "gateway-home";
 +  private String hadoopConfDir = "hadoop";
 +  private String gatewayHost = "localhost";
 +  private int gatewayPort = 0;
 +  private String gatewayPath = "gateway";
 +  private boolean hadoopKerberosSecured = false;
 +  private String kerberosConfig = "/etc/knox/conf/krb5.conf";
 +  private boolean kerberosDebugEnabled = false;
 +  private String kerberosLoginConfig = "/etc/knox/conf/krb5JAASLogin.conf";
 +  private String frontendUrl = null;
 +  private boolean xForwardedEnabled = true;
 +  private String gatewayApplicationsDir = null;
 +  private String gatewayServicesDir;
 +  private String defaultTopologyName = "default";
 +  private List<String> includedSSLCiphers = null;
 +  private List<String> excludedSSLCiphers = null;
 +  private boolean sslEnabled = false;
 +  private String truststoreType = "jks";
 +  private String keystoreType = "jks";
 +  private boolean isTopologyPortMappingEnabled = true;
 +  private ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<>();
 +  private int backupVersionLimit = -1;
 +  private long backupAgeLimit = -1;
 +
 +  public void setGatewayHomeDir( String gatewayHomeDir ) {
 +    this.gatewayHomeDir = gatewayHomeDir;
 +  }
 +
 +  public String getGatewayHomeDir() {
 +    return this.gatewayHomeDir;
 +  }
 +
 +  @Override
 +  public String getGatewayConfDir() {
 +    return gatewayHomeDir;
 +  }
 +
 +  @Override
 +  public String getGatewayDataDir() {
 +    return gatewayHomeDir;
 +  }
 +
 +  @Override
 +  public String getGatewaySecurityDir() {
 +    return gatewayHomeDir + "/security";
 +  }
 +
 +  @Override
 +  public String getGatewayTopologyDir() {
 +    return gatewayHomeDir + "/topologies";
 +  }
 +
 +  @Override
 +  public String getGatewayDeploymentDir() {
 +    return gatewayHomeDir + "/deployments";
 +  }
 +
 +//  public void setDeploymentDir( String clusterConfDir ) {
 +//    this.deployDir = clusterConfDir;
 +//  }
 +
 +  @Override
 +  public String getHadoopConfDir() {
 +    return hadoopConfDir;
 +  }
 +
 +//  public void setHadoopConfDir( String hadoopConfDir ) {
 +//    this.hadoopConfDir = hadoopConfDir;
 +//  }
 +
 +  @Override
 +  public String getGatewayHost() {
 +    return gatewayHost;
 +  }
 +
 +//  public void setGatewayHost( String gatewayHost ) {
 +//    this.gatewayHost = gatewayHost;
 +//  }
 +
 +  @Override
 +  public int getGatewayPort() {
 +    return gatewayPort;
 +  }
 +
 +//  public void setGatewayPort( int gatewayPort ) {
 +//    this.gatewayPort = gatewayPort;
 +//  }
 +
 +  @Override
 +  public String getGatewayPath() {
 +    return gatewayPath;
 +  }
 +
 +  public void setGatewayPath( String gatewayPath ) {
 +    this.gatewayPath = gatewayPath;
 +  }
 +
 +  @Override
 +  public InetSocketAddress getGatewayAddress() throws UnknownHostException {
 +    return new InetSocketAddress( getGatewayHost(), getGatewayPort() );
 +  }
 +
 +
 +  public long getGatewayIdleTimeout() {
 +    return 0L;
 +  }
 +
 +  @Override
 +  public boolean isSSLEnabled() {
 +    return sslEnabled;
 +  }
 +
 +  public void setSSLEnabled( boolean sslEnabled ) {
 +    this.sslEnabled = sslEnabled;
 +  }
 +
 +  @Override
 +  public boolean isHadoopKerberosSecured() {
 +    return hadoopKerberosSecured;
 +  }
 +
 +  public void setHadoopKerberosSecured(boolean hadoopKerberosSecured) {
 +    this.hadoopKerberosSecured = hadoopKerberosSecured;
 +  }
 +
 +  @Override
 +  public String getKerberosConfig() {
 +    return kerberosConfig;
 +  }
 +
 +  public void setKerberosConfig(String kerberosConfig) {
 +    this.kerberosConfig = kerberosConfig;
 +  }
 +
 +  @Override
 +  public boolean isKerberosDebugEnabled() {
 +    return kerberosDebugEnabled;
 +  }
 +
 +  public void setKerberosDebugEnabled(boolean kerberosDebugEnabled) {
 +    this.kerberosDebugEnabled = kerberosDebugEnabled;
 +  }
 +
 +  @Override
 +  public String getKerberosLoginConfig() {
 +    return kerberosLoginConfig;
 +  }
 +
 +  @Override
 +  public String getDefaultTopologyName() {
 +    return defaultTopologyName;
 +  }
 +
 +  public void setDefaultTopologyName( String defaultTopologyName ) {
 +    this.defaultTopologyName = defaultTopologyName;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getDefaultAppRedirectPath()
 +   */
 +  @Override
 +  public String getDefaultAppRedirectPath() {
 +
 +    if(StringUtils.isBlank(this.defaultTopologyName)) {
 +      return "/gateway/sandbox";
 +    } else {
 +      return "/gateway/"+this.defaultTopologyName;
 +    }
 +
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getFrontendUrl()
 +   */
 +  @Override
 +  public String getFrontendUrl() { return frontendUrl; }
 +
 +  public void setFrontendUrl( String frontendUrl ) {
 +    this.frontendUrl = frontendUrl;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getExcludedSSLProtocols()
 +   */
 +  @Override
 +  public List getExcludedSSLProtocols() {
 +    List<String> protocols = new ArrayList<String>();
 +    protocols.add("SSLv3");
 +    return protocols;
 +  }
 +
 +  @Override
 +  public List getIncludedSSLCiphers() {
 +    return includedSSLCiphers;
 +  }
 +
 +  public void setIncludedSSLCiphers( List<String> list ) {
 +    includedSSLCiphers = list;
 +  }
 +
 +  @Override
 +  public List getExcludedSSLCiphers() {
 +    return excludedSSLCiphers;
 +  }
 +
 +  public void setExcludedSSLCiphers( List<String> list ) {
 +    excludedSSLCiphers = list;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isClientAuthNeeded()
 +   */
 +  @Override
 +  public boolean isClientAuthNeeded() {
 +    // TODO Auto-generated method stub
 +    return false;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getTruststorePath() {
 +    // TODO Auto-generated method stub
 +    return null;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTrustAllCerts()
 +   */
 +  @Override
 +  public boolean getTrustAllCerts() {
 +    // TODO Auto-generated method stub
 +    return false;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststoreType()
 +   */
 +  @Override
 +  public String getTruststoreType() {
 +    return truststoreType;
 +  }
 +
 +  public void setTruststoreType( String truststoreType ) {
 +    this.truststoreType = truststoreType;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getKeystoreType()
 +   */
 +  @Override
 +  public String getKeystoreType() {
 +    return keystoreType;
 +  }
 +
 +  public void setKeystoreType( String keystoreType ) {
 +    this.keystoreType = keystoreType;
 +  }
 +
 +  public void setKerberosLoginConfig(String kerberosLoginConfig) {
 +    this.kerberosLoginConfig = kerberosLoginConfig;
 +  }
 +
 +  @Override
 +  public String getGatewayServicesDir() {
 +    if( gatewayServicesDir != null ) {
 +      return gatewayServicesDir;
 +    } else {
 +      File targetDir = new File( System.getProperty( "user.dir" ), "target/services" );
 +      return targetDir.getPath();
 +    }
 +  }
 +
 +  public void setGatewayServicesDir( String gatewayServicesDir ) {
 +    this.gatewayServicesDir = gatewayServicesDir;
 +  }
 +
 +  @Override
 +  public String getGatewayApplicationsDir() {
 +    if( gatewayApplicationsDir != null ) {
 +      return gatewayApplicationsDir;
 +    } else {
 +      return getGatewayConfDir() + "/applications";
 +    }
 +  }
 +
 +  public void setGatewayApplicationsDir( String gatewayApplicationsDir ) {
 +    this.gatewayApplicationsDir = gatewayApplicationsDir;
 +  }
 +
 +  @Override
 +  public boolean isXForwardedEnabled() {
 +    return xForwardedEnabled;
 +  }
 +
 +  public void setXForwardedEnabled(boolean enabled) {
 +    xForwardedEnabled = enabled;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getEphemeralDHKeySize()
 +   */
 +  @Override
 +  public String getEphemeralDHKeySize() {
 +    return "2048";
 +  }
 +
 +  @Override
 +  public int getHttpClientMaxConnections() {
 +    return 16;
 +  }
 +
 +  @Override
 +  public int getHttpClientConnectionTimeout() {
 +    return -1;
 +  }
 +
 +  @Override
 +  public int getHttpClientSocketTimeout() {
 +    return -1;
 +  }
 +
 +  @Override
 +  public int getThreadPoolMax() {
 +    return 16;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestBuffer() {
 +    return 16*1024;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestHeaderBuffer() {
 +    return 8*1024;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseBuffer() {
 +    return 32*1024;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseHeaderBuffer() {
 +    return 8*1024;
 +  }
 +
 +  public void setGatewayDeploymentsBackupVersionLimit( int newBackupVersionLimit ) {
 +    backupVersionLimit = newBackupVersionLimit;
 +  }
 +
 +  public int getGatewayDeploymentsBackupVersionLimit() {
 +    return backupVersionLimit;
 +  }
 +
 +  public void setTopologyPortMapping(ConcurrentHashMap<String, Integer> topologyPortMapping) {
 +    this.topologyPortMapping = topologyPortMapping;
 +  }
 +
 +  public void setGatewayPortMappingEnabled(
 +      boolean topologyPortMappingEnabled) {
 +    isTopologyPortMappingEnabled = topologyPortMappingEnabled;
 +  }
 +
 +  @Override
 +  public long getGatewayDeploymentsBackupAgeLimit() {
 +    return backupAgeLimit;
 +  }
 +
 +  public void setGatewayDeploymentsBackupAgeLimit( long newBackupAgeLimit ) {
 +    backupAgeLimit = newBackupAgeLimit;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getSigningKeystoreName()
 +   */
 +  @Override
 +  public String getSigningKeystoreName() {
 +    return null;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getSigningKeyAlias()
 +   */
 +  @Override
 +  public String getSigningKeyAlias() {
 +    return null;
 +  }
 +
 +  @Override
 +  public List<String> getGlobalRulesServices() {
 +    ArrayList<String> services = new ArrayList<>();
 +    services.add("WEBHDFS");
 +    services.add("HBASE");
 +    services.add("HIVE");
 +    services.add("OOZIE");
 +    services.add("RESOURCEMANAGER");
 +    services.add("STORM");
 +    return services;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isWebsocketEnabled()
 +   */
 +  @Override
 +  public boolean isWebsocketEnabled() {
 +    return DEFAULT_WEBSOCKET_FEATURE_ENABLED;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxTextMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageSize() {
 +    return DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxBinaryMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageSize() {
 +    return DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxTextMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageBufferSize() {
 +    return DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketMaxBinaryMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageBufferSize() {
 +    return DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketInputBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketInputBufferSize() {
 +    return DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketAsyncWriteTimeout()
 +   */
 +  @Override
 +  public int getWebsocketAsyncWriteTimeout() {
 +    return DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getWebsocketIdleTimeout()
 +   */
 +  @Override
 +  public int getWebsocketIdleTimeout() {
 +    return DEFAULT_WEBSOCKET_IDLE_TIMEOUT;
 +  }
 +
 +  @Override
 +  public boolean isMetricsEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public boolean isJmxMetricsReportingEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public boolean isGraphiteMetricsReportingEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public String getGraphiteHost() {
 +    return null;
 +  }
 +
 +  @Override
 +  public int getGraphitePort() {
 +    return 0;
 +  }
 +
 +  @Override
 +  public int getGraphiteReportingFrequency() {
 +    return 0;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getMimeTypesToCompress()
 +   */
 +  @Override
 +  public List<String> getMimeTypesToCompress() {
 +    return new ArrayList<String>();
 +  }
 +
 +  @Override
 +  public boolean isCookieScopingToPathEnabled() {
 +    return false;
 +  }
 +
 +  @Override
 +  public String getHeaderNameForRemoteAddress() {
 +    return "X-Forwarded-For";
 +  }
 +
 +  @Override
 +  public String getAlgorithm() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getPBEAlgorithm() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getTransformation() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getSaltSize() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getIterationCount() {
 +    return null;
 +  }
 +
 +  @Override
 +  public String getKeyLength() {
 +    return null;
 +  }
 +
 +  /**
 +   * Map of Topology names and their ports.
 +   *
 +   * @return map of topology name to port
 +   */
 +  @Override
 +  public Map<String, Integer> getGatewayPortMappings() {
 +    return topologyPortMapping;
 +  }
 +
 +  /**
 +   * Is the topology port mapping feature enabled?
 +   *
 +   * @return true if topology port mapping is enabled
 +   */
 +  @Override
 +  public boolean isGatewayPortMappingEnabled() {
 +    return isTopologyPortMappingEnabled;
 +  }
 +
 +  @Override
 +  public boolean isGatewayServerHeaderEnabled() {
 +    return false;
 +  }
++
++  @Override
++  public boolean isClientAuthWanted() {
++    return false;
++  }
 +}
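
A minimal sketch of how this stub config can drive a test, assuming the class above is on the classpath; the home directory below is hypothetical:

    import org.apache.knox.gateway.GatewayTestConfig;

    public class GatewayTestConfigSketch {
      public static void main(String[] args) {
        GatewayTestConfig config = new GatewayTestConfig();
        config.setGatewayHomeDir("/tmp/knox-test");  // hypothetical test directory
        config.setSSLEnabled(true);
        // Derived directories all hang off the home dir, per the accessors above.
        System.out.println(config.getGatewayTopologyDir()); // /tmp/knox-test/topologies
        System.out.println(config.getGatewaySecurityDir()); // /tmp/knox-test/security
      }
    }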

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-util-common/src/main/java/org/apache/knox/gateway/util/HttpUtils.java
----------------------------------------------------------------------
diff --cc gateway-util-common/src/main/java/org/apache/knox/gateway/util/HttpUtils.java
index e87c3fb,0000000..8dc264b
mode 100644,000000..100644
--- a/gateway-util-common/src/main/java/org/apache/knox/gateway/util/HttpUtils.java
+++ b/gateway-util-common/src/main/java/org/apache/knox/gateway/util/HttpUtils.java
@@@ -1,123 -1,0 +1,122 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p/>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p/>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.util;
 +
 +import java.io.UnsupportedEncodingException;
 +import java.net.URL;
 +import java.net.URLDecoder;
++import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.HashMap;
- import java.util.LinkedHashMap;
- import java.util.LinkedList;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.StringTokenizer;
 +
 +public class HttpUtils {
 +
 +  public static Map<String, List<String>> splitQuery(String queryString)
 +      throws UnsupportedEncodingException {
-     final Map<String, List<String>> queryPairs = new LinkedHashMap<String, List<String>>();
++    final Map<String, List<String>> queryPairs = new HashMap<String, List<String>>();
 +    if (queryString == null || queryString.trim().isEmpty()) {
 +      return queryPairs;
 +    }
 +    final String[] pairs = queryString.split("&");
 +    for (String pair : pairs) {
 +      final int idx = pair.indexOf("=");
 +      final String key = idx > 0 ? URLDecoder.decode(pair.substring(0, idx), "UTF-8") : pair;
 +      if (!queryPairs.containsKey(key)) {
-         queryPairs.put(key, new LinkedList<String>());
++        queryPairs.put(key, new ArrayList<String>());
 +      }
 +      final String value = idx > 0 && pair.length() > idx + 1 
 +          ? URLDecoder.decode(pair.substring(idx + 1), "UTF-8") : "";
 +      queryPairs.get(key).add(value);
 +    }
 +    return queryPairs;
 +  }
 +
 +  public static Map<String,String[]> parseQueryString( String queryString ) {
 +    Map<String,String[]> map = new HashMap<>();
 +    if( queryString != null && !queryString.isEmpty() ) {
 +      StringTokenizer parser = new StringTokenizer( queryString, "&?;=", true );
 +      String name = null;
 +      String value = null;
 +      while( parser.hasMoreTokens() ) {
 +        String token = parser.nextToken();
 +        String ttoken = token.trim();
 +        if( ttoken.length() == 1 ) {
 +          char c = ttoken.charAt( 0 );
 +          switch( c ) {
 +            case '&':
 +            case '?':
 +            case ';':
 +              addQueryStringParam( map, name, value );
 +              name = null;
 +              value = null;
 +              continue;
 +            case '=':
 +              if( name == null ) {
 +                name = "";
 +                value = "";
 +              } else if( name.isEmpty() ) {
 +                addQueryStringParam( map, name, value );
 +                name = "";
 +                value = "";
 +              } else {
 +                value = "";
 +              }
 +              continue;
 +          }
 +        }
 +        if( name == null ) {
 +          name = token;
 +        } else {
 +          value = token;
 +        }
 +      } // while
 +      if( name != null ) {
 +        addQueryStringParam( map, name, value );
 +      }
 +    }
 +    return map;
 +  }
 +
 +  private static final String urlDecodeUtf8( String s ) {
 +    if( s != null ) {
 +      try {
 +        s = URLDecoder.decode( s, "UTF-8" );
 +      } catch( UnsupportedEncodingException e ) {
 +        // Ignore it.
 +      }
 +    }
 +    return s;
 +  }
 +
 +  static final void addQueryStringParam( final Map<String,String[]> map, String name, String value ) {
 +    name = urlDecodeUtf8( name );
 +    value = urlDecodeUtf8( value );
 +    String[] values = map.get( name );
 +    if( values == null ) {
 +      values = new String[]{ value };
 +    } else {
 +      values = Arrays.copyOf( values, values.length + 1 );
 +      values[ values.length-1 ] = value;
 +    }
 +    map.put( name, values );
 +  }
 +
 +}
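
A quick sanity sketch of the splitQuery contract above (hand-written, not from the patch): repeated keys accumulate values, a bare key maps to a single empty-string value, and, since the merge swapped LinkedHashMap for HashMap, key order is no longer insertion order:

    import java.util.List;
    import java.util.Map;
    import org.apache.knox.gateway.util.HttpUtils;

    public class SplitQuerySketch {
      public static void main(String[] args) throws Exception {
        Map<String, List<String>> q = HttpUtils.splitQuery("x=1&x=2&flag&user.name=alice");
        System.out.println(q.get("x"));         // [1, 2]
        System.out.println(q.get("flag"));      // one empty-string value
        System.out.println(q.get("user.name")); // [alice]
      }
    }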


[17/22] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
Merge branch 'master' into KNOX-998-Package_Restructuring

# Conflicts:
#	gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
#	gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
#	gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
#	gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
#	gateway-server/src/main/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityService.java
#	gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
#	gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
#	gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/JWTokenAuthority.java
#	gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWTToken.java


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/b3107e91
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/b3107e91
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/b3107e91

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: b3107e91c5466619e33d0ccb68ab0a85a40d96c3
Parents: 416ee7c a841e26
Author: Sandeep More <mo...@apache.org>
Authored: Mon Sep 25 15:29:24 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Sep 25 15:29:24 2017 -0400

----------------------------------------------------------------------
 gateway-discovery-ambari/pom.xml                |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 .../ha/provider/impl/DefaultURLManager.java     |   8 +-
 ...entityAsserterHttpServletRequestWrapper.java |  25 +-
 .../filter/JWTAccessTokenAssertionFilter.java   |  23 +-
 .../jwt/filter/JWTAuthCodeAssertionFilter.java  |  16 +-
 .../federation/AbstractJWTFilterTest.java       |  18 +-
 gateway-release/home/conf/descriptors/README    |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-release/pom.xml                         |   4 +
 gateway-server/pom.xml                          |   5 +
 .../DefaultServiceDiscoveryConfig.java          |  48 ++
 .../discovery/ServiceDiscoveryFactory.java      |  81 ++
 .../topology/simple/SimpleDescriptor.java       |  46 +
 .../simple/SimpleDescriptorFactory.java         |  71 ++
 .../simple/SimpleDescriptorHandler.java         | 186 ++++
 .../topology/simple/SimpleDescriptorImpl.java   | 111 +++
 .../simple/SimpleDescriptorMessages.java        |  44 +
 .../apache/knox/gateway/GatewayMessages.java    |   9 +-
 .../gateway/config/impl/GatewayConfigImpl.java  |  10 +
 .../services/DefaultGatewayServices.java        |   3 +-
 .../services/security/impl/JettySSLService.java |  11 +-
 .../impl/DefaultTokenAuthorityService.java      |  21 +-
 .../topology/impl/DefaultTopologyService.java   | 281 +++++-
 .../builder/BeanPropertyTopologyBuilder.java    |   2 +-
 .../impl/DefaultTokenAuthorityServiceTest.java  | 160 ++++
 .../PropertiesFileServiceDiscoveryTest.java     |  90 ++
 .../discovery/ServiceDiscoveryFactoryTest.java  |  81 ++
 .../test/extension/DummyServiceDiscovery.java   |  66 ++
 .../extension/DummyServiceDiscoveryType.java    |  32 +
 .../PropertiesFileServiceDiscovery.java         | 108 +++
 .../PropertiesFileServiceDiscoveryType.java     |  35 +
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 +
 .../extension/SneakyServiceDiscoveryType.java   |  33 +
 .../simple/SimpleDescriptorFactoryTest.java     | 218 +++++
 .../simple/SimpleDescriptorHandlerTest.java     | 239 ++++++
 .../topology/DefaultTopologyServiceTest.java    |  63 +-
 ...eway.topology.discovery.ServiceDiscoveryType |  21 +
 .../resources/keystores/server-keystore.jks     | Bin 0 -> 1387 bytes
 .../topology/file/ambari-cluster-policy.xml     |  74 ++
 .../topology/file/simple-topology-four.json     |  18 +
 .../service/knoxsso/WebSSOResourceTest.java     |  14 +-
 .../knoxtoken/TokenServiceResourceTest.java     |  14 +-
 .../topology/discovery/GatewayService.java      |  29 +
 .../topology/discovery/ServiceDiscovery.java    |  76 ++
 .../discovery/ServiceDiscoveryConfig.java       |  42 +
 .../discovery/ServiceDiscoveryType.java         |  40 +
 .../knox/gateway/config/GatewayConfig.java      |   2 +
 .../security/token/JWTokenAuthority.java        |  19 +-
 .../services/security/token/impl/JWT.java       |  39 +-
 .../services/security/token/impl/JWTToken.java  |  47 +-
 .../security/token/impl/JWTTokenTest.java       |  24 +-
 .../apache/knox/gateway/GatewayTestConfig.java  |   5 +
 .../org/apache/knox/gateway/util/HttpUtils.java |   7 +-
 pom.xml                                         |  27 +-
 62 files changed, 4228 insertions(+), 182 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManager.java
----------------------------------------------------------------------
diff --cc gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManager.java
index a309c19,0000000..f6ad025
mode 100644,000000..100644
--- a/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManager.java
+++ b/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/provider/impl/DefaultURLManager.java
@@@ -1,100 -1,0 +1,98 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p/>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p/>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.ha.provider.impl;
 +
 +import com.google.common.collect.Lists;
 +import org.apache.knox.gateway.ha.provider.HaServiceConfig;
 +import org.apache.knox.gateway.ha.provider.URLManager;
 +import org.apache.knox.gateway.ha.provider.impl.i18n.HaMessages;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +
 +import java.net.URI;
 +import java.util.List;
 +import java.util.concurrent.ConcurrentLinkedQueue;
 +
 +public class DefaultURLManager implements URLManager {
 +
 +  private static final HaMessages LOG = MessagesFactory.get(HaMessages.class);
 +
 +  private ConcurrentLinkedQueue<String> urls = new ConcurrentLinkedQueue<>();
 +
-   public DefaultURLManager() {
-   }
 +
 +  @Override
 +  public boolean supportsConfig(HaServiceConfig config) {
 +    return true;
 +  }
 +
 +  @Override
 +  public void setConfig(HaServiceConfig config) {
 +    //no-op
 +  }
 +
 +  @Override
-   public String getActiveURL() {
++  public synchronized String getActiveURL() {
 +    return urls.peek();
 +  }
 +
 +  @Override
 +  public synchronized void setActiveURL(String url) {
 +    String top = urls.peek();
 +    if (top.equalsIgnoreCase(url)) {
 +      return;
 +    }
 +    if (urls.contains(url)) {
 +      urls.remove(url);
 +      List<String> remainingList = getURLs();
 +      urls.clear();
 +      urls.add(url);
 +      urls.addAll(remainingList);
 +    }
 +  }
 +
 +  @Override
-   public List<String> getURLs() {
++  public synchronized List<String> getURLs() {
 +    return Lists.newArrayList(urls.iterator());
 +  }
 +
 +  @Override
 +  public synchronized void setURLs(List<String> urls) {
-     if ( urls != null && !urls.isEmpty()) {
++    if (urls != null && !urls.isEmpty()) {
 +      this.urls.clear();
 +      this.urls.addAll(urls);
 +    }
 +  }
 +
 +  @Override
 +  public synchronized void markFailed(String url) {
 +    String top = urls.peek();
 +    boolean pushToBottom = false;
 +    URI topUri = URI.create(top);
 +    URI incomingUri = URI.create(url);
 +    String topHostPort = topUri.getHost() + ":" + topUri.getPort();
 +    String incomingHostPort = incomingUri.getHost() + ":" + incomingUri.getPort();
 +    if ( topHostPort.equals(incomingHostPort) ) {
 +      pushToBottom = true;
 +    }
 +    //put the failed url at the bottom
 +    if ( pushToBottom ) {
 +      String failed = urls.poll();
 +      urls.offer(failed);
 +      LOG.markedFailedUrl(failed, urls.peek());
 +    }
 +  }
 +}
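
A small sketch of the round-robin failover behavior above, assuming the gateway-provider-ha jar and its i18n dependencies are on the classpath; the WebHDFS URLs are made up:

    import java.util.Arrays;
    import org.apache.knox.gateway.ha.provider.impl.DefaultURLManager;

    public class FailoverSketch {
      public static void main(String[] args) {
        DefaultURLManager mgr = new DefaultURLManager();
        mgr.setURLs(Arrays.asList("http://host1:50070/webhdfs",
                                  "http://host2:50070/webhdfs"));
        System.out.println(mgr.getActiveURL());  // http://host1:50070/webhdfs
        mgr.markFailed(mgr.getActiveURL());      // failed URL rotates to the bottom
        System.out.println(mgr.getActiveURL());  // http://host2:50070/webhdfs
      }
    }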

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-provider-identity-assertion-common/src/main/java/org/apache/knox/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
----------------------------------------------------------------------
diff --cc gateway-provider-identity-assertion-common/src/main/java/org/apache/knox/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
index a484d16,0000000..af1fbd6
mode 100644,000000..100644
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/knox/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/knox/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
@@@ -1,295 -1,0 +1,304 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.identityasserter.common.filter;
 +
 +import org.apache.commons.io.IOUtils;
 +import org.apache.knox.gateway.SpiGatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.security.PrimaryPrincipal;
 +import org.apache.knox.gateway.servlet.SynchronousServletInputStreamAdapter;
 +import org.apache.knox.gateway.util.HttpUtils;
 +
 +import javax.servlet.ServletInputStream;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletRequestWrapper;
 +import java.io.ByteArrayInputStream;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.io.UnsupportedEncodingException;
 +import java.net.URLEncoder;
 +import java.nio.charset.Charset;
 +import java.security.Principal;
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.Collection;
 +import java.util.Collections;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.Map;
 +
 +public class IdentityAsserterHttpServletRequestWrapper extends HttpServletRequestWrapper {
 +
 +  private static SpiGatewayMessages log = MessagesFactory.get( SpiGatewayMessages.class );
 +
 +  private static final String PRINCIPAL_PARAM = "user.name";
 +  private static final String DOAS_PRINCIPAL_PARAM = "doAs";
 +  
 +  String username = null;
 +
 +  public IdentityAsserterHttpServletRequestWrapper( HttpServletRequest request, String principal ) {
 +    super(request);
 +    username = principal;
 +  }
 +
 +  @Override
 +  public Principal getUserPrincipal() {
 +    return new PrimaryPrincipal(username);
 +  }
 +
 +  @Override
 +  public String getParameter(String name) {
 +    if (name.equals(PRINCIPAL_PARAM)) {
 +      return username;
 +    }
 +    return super.getParameter(name);
 +  }
 +  
-   @SuppressWarnings("rawtypes")
 +  @Override
-   public Map getParameterMap() {
-     Map map = null;
++  public Map<String, String[]> getParameterMap() {
++    Map<String, String[]> map = null;
 +    try {
-       map = getParams();
++      map = convertValuesToStringArrays(getParams());
 +    } catch (UnsupportedEncodingException e) {
 +      log.unableToGetParamsFromQueryString(e);
 +    }
 +    return map;
 +  }
 +
-   @SuppressWarnings({ "unchecked", "rawtypes" })
++  private Map<String, String[]> convertValuesToStringArrays(Map<String, List<String>> params) {
++    Map<String, String[]> arrayMap = new HashMap<String, String[]>();
++    if (params != null) {
++      for (Map.Entry<String, List<String>> entry : params.entrySet()) {
++        List<String> values = entry.getValue();
++        arrayMap.put(entry.getKey(), values.toArray(new String[values.size()]));
++      }
++    }
++    return arrayMap;
++  }
++
 +  @Override
-   public Enumeration getParameterNames() {
++  public Enumeration<String> getParameterNames() {
 +    Enumeration<String> e = null;
 +    Map<String, List<String>> params;
 +    try {
 +      params = getParams();
 +      if (params == null) {
 +        params = new HashMap<>();
 +      }
 +      e = Collections.enumeration((Collection<String>) params.keySet());
 +    } catch (UnsupportedEncodingException e1) {
 +      log.unableToGetParamsFromQueryString(e1);
 +    }
 +
 +    return e;
 +  }
 +
 +  @Override
 +  public String[] getParameterValues(String name) {
-     String[] p = null;
++    String[] p = {};
 +    Map<String, List<String>> params;
 +    try {
 +      params = getParams();
 +      if (params == null) {
 +        params = new HashMap<>();
 +      }
-       p = (String[]) params.get(name).toArray();
++      List<String> values = params.get(name);
++      if (values != null) {
++        p = values.toArray(p);
++      }
 +    } catch (UnsupportedEncodingException e) {
 +      log.unableToGetParamsFromQueryString(e);
 +    }
 +
 +    return p;
 +  }
 +
 +  private Map<String, List<String>> getParams( String qString )
 +      throws UnsupportedEncodingException {
 +    Map<String, List<String>> params = null;
 +    if (getMethod().equals("GET")) {
 +      if (qString != null && qString.length() > 0) {
 +        params = HttpUtils.splitQuery( qString );
 +      }
 +      else {
 +        params = new HashMap<>();
 +      }
 +    }
 +    else {
 +      if (qString == null || qString.length() == 0) {
 +        return null;
 +      }
 +      else {
 +        params = HttpUtils.splitQuery( qString );
 +      }
 +    }  
 +    return params;
 +  }
 +
 +  private Map<String, List<String>> getParams()
 +      throws UnsupportedEncodingException {
 +    return getParams( super.getQueryString() );
 +  }
 +
 +  @Override
 +  public String getQueryString() {
 +    String q = null;
 +    Map<String, List<String>> params;
 +    try {
 +      params = getParams();
 +      if (params == null) {
 +        params = new HashMap<>();
 +      }
 +      ArrayList<String> al = new ArrayList<String>();
 +      al.add(username);
 +
 +      List<String> principalParamNames = getImpersonationParamNames();
 +      params = scrubOfExistingPrincipalParams(params, principalParamNames);
 +
 +      if ("true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {
 +        params.put(DOAS_PRINCIPAL_PARAM, al);
 +      } else {
 +        params.put(PRINCIPAL_PARAM, al);
 +      }
 +
 +      String encoding = getCharacterEncoding();
 +      if (encoding == null) {
 +        encoding = Charset.defaultCharset().name();
 +      }
 +      q = urlEncode(params, encoding);
 +    } catch (UnsupportedEncodingException e) {
 +      log.unableToGetParamsFromQueryString(e);
 +    }
 +
 +    return q;
 +  }
 +
 +  private List<String> getImpersonationParamNames() {
 +    // TODO: let's have service definitions register their impersonation
 +    // params in a future release and get this list from a central registry.
 +    // This will provide better coverage of protection by removing any
 +    // prepopulated impersonation params.
 +    ArrayList<String> principalParamNames = new ArrayList<String>();
 +    principalParamNames.add(DOAS_PRINCIPAL_PARAM);
 +    principalParamNames.add(PRINCIPAL_PARAM);
 +    return principalParamNames;
 +  }
 +
 +  private Map<String, List<String>> scrubOfExistingPrincipalParams(
 +      Map<String, List<String>> params, List<String> principalParamNames) {
 +    HashSet<String> remove = new HashSet<>();
 +    for (String paramKey : params.keySet()) {
 +      for (String p : principalParamNames) {
 +        if (p.equalsIgnoreCase(paramKey)) {
 +          remove.add(paramKey);
 +          log.possibleIdentitySpoofingAttempt(paramKey);
 +        }
 +      }
 +    }
 +    params.keySet().removeAll(remove);
 +    return params;
 +  }
 +
 +  @Override
 +  public int getContentLength() {
 +    int len;
 +    String contentType = getContentType();
 +    // If the content type is a form we might rewrite the body so default it to -1.
 +    if( contentType != null && contentType.startsWith( "application/x-www-form-urlencoded" ) ) {
 +      len = -1;
 +    } else {
 +      len = super.getContentLength();
 +    }
 +    return len;
 +  }
 +
 +  @Override
 +  public ServletInputStream getInputStream() throws java.io.IOException {
 +    String contentType = getContentType();
 +    if( contentType != null && contentType.startsWith( "application/x-www-form-urlencoded" ) ) {
 +      String encoding = getCharacterEncoding();
 +      if( encoding == null ) {
 +        encoding = Charset.defaultCharset().name();
 +      }
 +      String body = IOUtils.toString( super.getInputStream(), encoding );
 +      Map<String, List<String>> params = getParams( body );
 +      if (params == null) {
 +        params = new HashMap<>();
 +      }
 +      body = urlEncode( params, encoding );
 +      // ASCII is OK here because the urlEncode above should have already escaped
 +      return new ServletInputStreamWrapper( new ByteArrayInputStream( body.getBytes( "US-ASCII" ) ) );
 +    } else {
 +      return super.getInputStream();
 +    }
 +  }
 +
 +  static String urlEncode( String string, String encoding ) {
 +    try {
 +      return URLEncoder.encode( string, encoding );
 +    } catch (UnsupportedEncodingException e) {
 +      throw new UnsupportedOperationException(e);
 +    }
 +  }
 +
 +  public static String urlEncode( Map<String, List<String>> map, String encoding ) {
 +    StringBuilder sb = new StringBuilder();
 +    for( Map.Entry<String,List<String>> entry : map.entrySet() ) {
 +      String name = entry.getKey();
 +      if( name != null && name.length() > 0 ) {
 +        List<String> values = entry.getValue();
 +        if( values == null || values.size() == 0 ) {
 +          sb.append( entry.getKey() );
 +        } else {
 +          for( int i = 0; i < values.size(); i++ ) {
 +            String value = values.get(i);
 +            if( sb.length() > 0 ) {
 +              sb.append( "&" );
 +            }
 +            try {
 +              sb.append( urlEncode( name, encoding ) );
 +              if( value != null ) {
 +                sb.append("=");
 +                sb.append(urlEncode(value, encoding));
 +              }
 +            } catch( IllegalArgumentException e ) {
 +              log.skippingUnencodableParameter( name, value, encoding, e );
 +            }
 +          }
 +        }
 +      }
 +    }
 +    return sb.toString();
 +  }
 +
 +  private static class ServletInputStreamWrapper extends
 +      SynchronousServletInputStreamAdapter {
 +
 +    private InputStream stream;
 +
 +    private ServletInputStreamWrapper( InputStream stream ) {
 +      this.stream = stream;
 +    }
 +
 +    @Override
 +    public int read() throws IOException {
 +      return stream.read();
 +    }
 +
 +  }
 +
 +}
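
The static urlEncode(Map, String) helper above can be exercised on its own; a sketch, with made-up parameter values, of the query-string form the wrapper writes after injecting the principal:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.knox.gateway.identityasserter.common.filter.IdentityAsserterHttpServletRequestWrapper;

    public class UrlEncodeSketch {
      public static void main(String[] args) {
        Map<String, List<String>> params = new HashMap<String, List<String>>();
        params.put("op", Arrays.asList("LISTSTATUS"));
        params.put("user.name", Arrays.asList("alice"));
        // Pair order follows HashMap iteration; pairs are joined with '&'.
        System.out.println(
            IdentityAsserterHttpServletRequestWrapper.urlEncode(params, "UTF-8"));
        // e.g. op=LISTSTATUS&user.name=alice
      }
    }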

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
index f7e3725,0000000..b7e895c
mode 100644,000000..100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
@@@ -1,164 -1,0 +1,165 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.provider.federation.jwt.filter;
 +
 +import java.io.IOException;
 +import java.security.AccessController;
 +import java.security.Principal;
 +import java.text.ParseException;
 +import java.util.HashMap;
 +
 +import javax.security.auth.Subject;
 +import javax.servlet.FilterChain;
 +import javax.servlet.FilterConfig;
 +import javax.servlet.ServletException;
 +import javax.servlet.ServletRequest;
 +import javax.servlet.ServletResponse;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +
 +import org.apache.knox.gateway.filter.security.AbstractIdentityAssertionFilter;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.provider.federation.jwt.JWTMessages;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.registry.ServiceRegistry;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
++import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.services.security.token.impl.JWTToken;
 +import org.apache.knox.gateway.util.JsonUtils;
 +
 +public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilter {
 +  private static final String SVC_URL = "svc";
 +  private static final String EXPIRES_IN = "expires_in";
 +  private static final String TOKEN_TYPE = "token_type";
 +  private static final String ACCESS_TOKEN = "access_token";
 +  private static final String BEARER = "Bearer ";
 +  private static JWTMessages log = MessagesFactory.get( JWTMessages.class );
 +  private long validity;
 +  private JWTokenAuthority authority = null;
 +  private ServiceRegistry sr;
 +
 +  @Override
 +  public void init( FilterConfig filterConfig ) throws ServletException {
 +    super.init(filterConfig);
 +    String validityStr = filterConfig.getInitParameter("validity");
 +    if (validityStr == null) {
 +      validityStr = "3600"; // 1 hr. in secs
 +    }
 +    validity = Long.parseLong(validityStr);
 +
 +    GatewayServices services = (GatewayServices) filterConfig.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
 +    authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
 +    sr = (ServiceRegistry) services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
 +  }
-   
++
 +  @Override
 +  public void doFilter(ServletRequest request, ServletResponse response,
 +      FilterChain chain) throws IOException, ServletException {
 +    String jsonResponse = null;
-     
++
 +    String header = ((HttpServletRequest) request).getHeader("Authorization");
 +    if (header != null && header.startsWith(BEARER)) {
 +      // what follows the bearer designator should be the JWT token being used to request or as an access token
 +      String wireToken = header.substring(BEARER.length());
 +      JWTToken token;
 +      try {
 +        token = JWTToken.parseToken(wireToken);
 +      } catch (ParseException e) {
 +        throw new ServletException("ParseException encountered while processing the JWT token: ", e);
 +      }
 +      // ensure that there is a valid jwt token available and that there isn't a misconfiguration of filters
 +      if (token != null) {
 +        try {
 +          authority.verifyToken(token);
 +        }
 +        catch (TokenServiceException e) {
 +          log.unableToVerifyToken(e);
 +        }
 +      }
 +      else {
 +        throw new ServletException("Expected JWT Token not provided as Bearer token");
 +      }
-       
++
 +      // authorization of the user for the requested service (and resource?) should have been done by
 +      // the JWTFederationFilter - once we get here we can assume that it is authorized and we just need
 +      // to assert the identity via an access token
 +
 +      Subject subject = Subject.getSubject(AccessController.getContext());
 +      String principalName = getPrincipalName(subject);
 +      principalName = mapper.mapUserPrincipal(principalName);
-       
++
 +      // calculate expiration timestamp: validity * 1000 + currentTimeInMillis
 +      long expires = System.currentTimeMillis() + validity * 1000;
-       
++
 +      String serviceName = request.getParameter("service-name");
 +      String clusterName = request.getParameter("cluster-name");
 +      String accessToken = getAccessToken(principalName, serviceName, expires);
-       
++
 +      String serviceURL = sr.lookupServiceURL(clusterName, serviceName);
-       
++
 +      HashMap<String, Object> map = new HashMap<>();
 +      // TODO: populate map from JWT authorization code
 +      map.put(ACCESS_TOKEN, accessToken);
 +      map.put(TOKEN_TYPE, BEARER);
 +      map.put(EXPIRES_IN, expires);
-       
++
 +      // TODO: this url needs to be rewritten when in gateway deployments....
 +      map.put(SVC_URL, serviceURL);
-       
++
 +      jsonResponse = JsonUtils.renderAsJsonString(map);
-       
++
 +      response.getWriter().write(jsonResponse);
 +      //KNOX-685: response.getWriter().flush();
 +      return; // break filter chain
 +    }
 +    else {
 +      // no token provided in header
 +      // something is really wrong since the JWTFederationFilter should have verified its existence already
 +      // TODO: may have to check cookie and url as well before sending error
 +      ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED);
 +      return; //break filter chain
 +    }
 +  }
 +
 +  private String getAccessToken(final String principalName, String serviceName, long expires) {
 +    String accessToken = null;
 +
 +    Principal p = new Principal() {
 +
 +      @Override
 +      public String getName() {
 +        // TODO Auto-generated method stub
 +        return principalName;
 +      }
 +    };
-     JWTToken token = null;
++    JWT token = null;
 +    try {
 +      token = authority.issueToken(p, serviceName, "RS256", expires);
 +      // Coverity CID 1327961
 +      if( token != null ) {
 +        accessToken = token.toString();
 +      }
 +    } catch (TokenServiceException e) {
 +      log.unableToIssueToken(e);
 +    }
 +
 +    return accessToken;
 +  }
 +
 +}
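
For reference, the response body assembled above has this shape (values illustrative; note the expires_in entry actually carries an absolute expiry timestamp in milliseconds, not a duration):

    {
      "access_token": "<signed JWT>",
      "token_type": "Bearer ",
      "expires_in": 1506370764000,
      "svc": "<service URL from the registry>"
    }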

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
index 32d0e99,0000000..d367534
mode 100644,000000..100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
@@@ -1,100 -1,0 +1,100 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.provider.federation.jwt.filter;
 +
 +import java.io.IOException;
 +import java.security.AccessController;
 +import java.util.HashMap;
 +
 +import javax.security.auth.Subject;
 +import javax.servlet.FilterChain;
 +import javax.servlet.FilterConfig;
 +import javax.servlet.ServletException;
 +import javax.servlet.ServletRequest;
 +import javax.servlet.ServletResponse;
 +
 +import org.apache.knox.gateway.filter.security.AbstractIdentityAssertionFilter;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.registry.ServiceRegistry;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
- import org.apache.knox.gateway.services.security.token.impl.JWTToken;
++import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.util.JsonUtils;
 +
 +public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter {
 +  private static final String BEARER = "Bearer ";
-   
++
 +  private JWTokenAuthority authority = null;
 +
 +  private ServiceRegistry sr;
 +
 +  @Override
 +  public void init( FilterConfig filterConfig ) throws ServletException {
 +    super.init(filterConfig);
 +    String validityStr = filterConfig.getInitParameter("validity");
 +    if (validityStr == null) {
 +      validityStr = "3600"; // 1 hr. in secs
 +    }
 +//    validity = Long.parseLong(validityStr);
 +
 +    GatewayServices services = (GatewayServices) filterConfig.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
 +    authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
 +    sr = (ServiceRegistry) services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
 +  }
-   
++
 +  @Override
 +  public void doFilter(ServletRequest request, ServletResponse response,
 +      FilterChain chain) throws IOException, ServletException {
 +
 +      Subject subject = Subject.getSubject(AccessController.getContext());
 +      String principalName = getPrincipalName(subject);
 +      principalName = mapper.mapUserPrincipal(principalName);
-       JWTToken authCode;
++      JWT authCode;
 +      try {
 +        authCode = authority.issueToken(subject, "RS256");
 +        // get the url for the token service
-         String url = null; 
++        String url = null;
 +        if (sr != null) {
 +          url = sr.lookupServiceURL("token", "TGS");
 +        }
-         
++
 +        HashMap<String, Object> map = new HashMap<>();
 +        // TODO: populate map from JWT authorization code
 +        // Coverity CID 1327960
 +        if( authCode != null ) {
 +          map.put( "iss", authCode.getIssuer() );
 +          map.put( "sub", authCode.getPrincipal() );
 +          map.put( "aud", authCode.getAudience() );
 +          map.put( "exp", authCode.getExpires() );
 +          map.put( "code", authCode.toString() );
 +        }
 +        if (url != null) {
 +          map.put("tke", url);
 +        }
-         
++
 +        String jsonResponse = JsonUtils.renderAsJsonString(map);
-         
++
 +        response.getWriter().write(jsonResponse);
 +        //KNOX-685: response.getWriter().flush();
 +      } catch (TokenServiceException e) {
 +        // TODO Auto-generated catch block
 +        e.printStackTrace();
 +      }
 +      return; // break filter chain
 +  }
 +}
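
Likewise, the auth-code filter above renders a map of the issued token's claims plus the token-service URL, roughly (all values illustrative):

    {
      "iss": "<issuer>",
      "sub": "<mapped principal>",
      "aud": "<audience>",
      "exp": "<expiration>",
      "code": "<signed JWT>",
      "tke": "<token service URL>"
    }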

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
index ea56486,0000000..790a7a5
mode 100644,000000..100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
@@@ -1,636 -1,0 +1,636 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.provider.federation;
 +
 +import static org.junit.Assert.fail;
 +
 +import java.io.IOException;
 +import java.net.InetAddress;
 +import java.security.AccessController;
 +import java.security.KeyPair;
 +import java.security.KeyPairGenerator;
 +import java.security.NoSuchAlgorithmException;
 +import java.security.Principal;
 +import java.security.PublicKey;
 +import java.security.cert.Certificate;
 +import java.security.interfaces.RSAPrivateKey;
 +import java.security.interfaces.RSAPublicKey;
 +import java.text.MessageFormat;
 +import java.util.Enumeration;
 +import java.util.List;
 +import java.util.ArrayList;
 +import java.util.Properties;
 +import java.util.Date;
 +import java.util.Set;
 +
 +import javax.security.auth.Subject;
 +import javax.servlet.FilterChain;
 +import javax.servlet.FilterConfig;
 +import javax.servlet.ServletContext;
 +import javax.servlet.ServletException;
 +import javax.servlet.ServletRequest;
 +import javax.servlet.ServletResponse;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +
 +import org.apache.commons.codec.binary.Base64;
 +import org.apache.knox.gateway.provider.federation.jwt.filter.AbstractJWTFilter;
 +import org.apache.knox.gateway.provider.federation.jwt.filter.SSOCookieFederationFilter;
 +import org.apache.knox.gateway.security.PrimaryPrincipal;
 +import org.apache.knox.gateway.services.security.impl.X509CertificateUtil;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
 +import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.services.security.token.impl.JWTToken;
 +import org.easymock.EasyMock;
 +import org.junit.After;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +
 +import com.nimbusds.jose.*;
 +import com.nimbusds.jwt.JWTClaimsSet;
 +import com.nimbusds.jwt.SignedJWT;
 +import com.nimbusds.jose.crypto.RSASSASigner;
 +import com.nimbusds.jose.crypto.RSASSAVerifier;
 +
 +public abstract class AbstractJWTFilterTest {
 +  private static final String SERVICE_URL = "https://localhost:8888/resource";
 +  private static final String dnTemplate = "CN={0},OU=Test,O=Hadoop,L=Test,ST=Test,C=US";
 +
 +  protected AbstractJWTFilter handler = null;
 +  protected static RSAPublicKey publicKey = null;
 +  protected static RSAPrivateKey privateKey = null;
 +  protected static String pem = null;
 +
 +  protected abstract void setTokenOnRequest(HttpServletRequest request, SignedJWT jwt);
 +  protected abstract void setGarbledTokenOnRequest(HttpServletRequest request, SignedJWT jwt);
 +  protected abstract String getAudienceProperty();
 +  protected abstract String getVerificationPemProperty();
 +
 +  private static String buildDistinguishedName(String hostname) {
 +    MessageFormat headerFormatter = new MessageFormat(dnTemplate);
 +    String[] paramArray = new String[1];
 +    paramArray[0] = hostname;
 +    String dn = headerFormatter.format(paramArray);
 +    return dn;
 +  }
 +
 +  @BeforeClass
 +  public static void generateKeys() throws Exception {
 +    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
 +    kpg.initialize(2048);
 +    KeyPair KPair = kpg.generateKeyPair();
 +    String dn = buildDistinguishedName(InetAddress.getLocalHost().getHostName());
 +    Certificate cert = X509CertificateUtil.generateCertificate(dn, KPair, 365, "SHA1withRSA");
 +    byte[] data = cert.getEncoded();
 +    Base64 encoder = new Base64( 76, "\n".getBytes( "ASCII" ) );
 +    pem = new String(encoder.encodeToString( data ).getBytes( "ASCII" )).trim();
 +
 +    publicKey = (RSAPublicKey) KPair.getPublic();
 +    privateKey = (RSAPrivateKey) KPair.getPrivate();
 +  }
 +
 +  @After
 +  public void teardown() throws Exception {
 +    handler.destroy();
 +  }
 +
 +  @Test
 +  public void testValidJWT() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
 +      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
 +      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
 +      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testValidAudienceJWT() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      props.put(getAudienceProperty(), "bar");
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
 +      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
 +      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
 +      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testInvalidAudienceJWT() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      props.put(getAudienceProperty(), "foo");
 +      props.put("sso.authentication.provider.url", "https://localhost:8443/gateway/knoxsso/api/v1/websso");
 +
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
 +      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testValidVerificationPEM() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +
 +      props.put(getAudienceProperty(), "bar");
 +      props.put("sso.authentication.provider.url", "https://localhost:8443/gateway/knoxsso/api/v1/websso");
 +      props.put(getVerificationPemProperty(), pem);
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 50000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
 +      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
 +      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
 +      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testExpiredJWT() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() - 1000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be false.", !chain.doFilterCalled);
 +      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testValidJWTNoExpiration() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", null, privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL).anyTimes();
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
 +      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
 +      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
 +      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testUnableToParseJWT() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("bob", new Date(new Date().getTime() + 5000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setGarbledTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL).anyTimes();
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
 +      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testFailedSignatureValidationJWT() throws Exception {
 +    try {
 +      // Create a private key to sign the token
 +      KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
 +      kpg.initialize(1024);
 +
 +      KeyPair kp = kpg.genKeyPair();
 +
 +      Properties props = getProperties();
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("bob", new Date(new Date().getTime() + 5000),
 +                             (RSAPrivateKey)kp.getPrivate(), props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL).anyTimes();
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
 +      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testInvalidVerificationPEM() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +
 +      KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
 +      kpg.initialize(1024);
 +
 +      KeyPair keyPair = kpg.generateKeyPair();
 +      String dn = buildDistinguishedName(InetAddress.getLocalHost().getHostName());
 +      Certificate cert = X509CertificateUtil.generateCertificate(dn, keyPair, 365, "SHA1withRSA");
 +      byte[] data = cert.getEncoded();
 +      Base64 encoder = new Base64( 76, "\n".getBytes( "ASCII" ) );
 +      String failingPem = encoder.encodeToString( data ).trim();
 +
 +      props.put(getAudienceProperty(), "bar");
 +      props.put(getVerificationPemProperty(), failingPem);
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 50000), privateKey, props);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be true.", chain.doFilterCalled == false);
 +      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testInvalidIssuer() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("new-issuer", "alice", new Date(new Date().getTime() + 5000), privateKey);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +         new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
 +      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  @Test
 +  public void testValidIssuerViaConfig() throws Exception {
 +    try {
 +      Properties props = getProperties();
 +      props.setProperty(AbstractJWTFilter.JWT_EXPECTED_ISSUER, "new-issuer");
 +      handler.init(new TestFilterConfig(props));
 +
 +      SignedJWT jwt = getJWT("new-issuer", "alice", new Date(new Date().getTime() + 5000), privateKey);
 +
 +      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +      setTokenOnRequest(request, jwt);
 +
 +      EasyMock.expect(request.getRequestURL()).andReturn(
 +          new StringBuffer(SERVICE_URL)).anyTimes();
 +      EasyMock.expect(request.getQueryString()).andReturn(null);
 +      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
 +          SERVICE_URL);
 +      EasyMock.replay(request);
 +
 +      TestFilterChain chain = new TestFilterChain();
 +      handler.doFilter(request, response, chain);
 +      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled);
 +      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
 +      Assert.assertTrue("No PrimaryPrincipal", principals.size() > 0);
 +      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
 +    } catch (ServletException se) {
 +      fail("Should NOT have thrown a ServletException.");
 +    }
 +  }
 +
 +  protected Properties getProperties() {
 +    Properties props = new Properties();
 +    props.setProperty(
 +        SSOCookieFederationFilter.SSO_AUTHENTICATION_PROVIDER_URL,
 +        "https://localhost:8443/authserver");
 +    return props;
 +  }
 +
 +  protected SignedJWT getJWT(String sub, Date expires, RSAPrivateKey privateKey,
 +      Properties props) throws Exception {
 +    return getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, sub, expires, privateKey);
 +  }
 +
 +  protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey)
 +      throws Exception {
 +    List<String> aud = new ArrayList<>();
 +    aud.add("bar");
 +
 +    JWTClaimsSet claims = new JWTClaimsSet.Builder()
 +        .issuer(issuer)
 +        .subject(sub)
 +        .audience(aud)
 +        .expirationTime(expires)
 +        .claim("scope", "openid")
 +        .build();
 +
 +    JWSHeader header = new JWSHeader.Builder(JWSAlgorithm.RS256).build();
 +
 +    SignedJWT signedJWT = new SignedJWT(header, claims);
 +    JWSSigner signer = new RSASSASigner(privateKey);
 +
 +    signedJWT.sign(signer);
 +
 +    return signedJWT;
 +  }
 +
 +  protected static class TestFilterConfig implements FilterConfig {
 +    Properties props = null;
 +
 +    public TestFilterConfig(Properties props) {
 +      this.props = props;
 +    }
 +
 +    @Override
 +    public String getFilterName() {
 +      return null;
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see javax.servlet.FilterConfig#getServletContext()
 +     */
 +    @Override
 +    public ServletContext getServletContext() {
 +      // A real implementation could return a mocked ServletContext exposing
 +      // GatewayServices and a JWTokenAuthority, but these tests do not need one.
 +      return null;
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see javax.servlet.FilterConfig#getInitParameter(java.lang.String)
 +     */
 +    @Override
 +    public String getInitParameter(String name) {
 +      return props.getProperty(name, null);
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see javax.servlet.FilterConfig#getInitParameterNames()
 +     */
 +    @Override
 +    public Enumeration<String> getInitParameterNames() {
 +      return null;
 +    }
 +
 +  }
 +
 +  protected static class TestJWTokenAuthority implements JWTokenAuthority {
 +
 +    private PublicKey verifyingKey;
 +
 +    public TestJWTokenAuthority(PublicKey verifyingKey) {
 +      this.verifyingKey = verifyingKey;
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
 +     */
 +    @Override
-     public JWTToken issueToken(Subject subject, String algorithm)
++    public JWT issueToken(Subject subject, String algorithm)
 +        throws TokenServiceException {
 +      return null;
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
 +     */
 +    @Override
-     public JWTToken issueToken(Principal p, String algorithm)
++    public JWT issueToken(Principal p, String algorithm)
 +        throws TokenServiceException {
 +      return null;
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
 +     */
 +    @Override
-     public JWTToken issueToken(Principal p, String audience, String algorithm)
++    public JWT issueToken(Principal p, String audience, String algorithm)
 +        throws TokenServiceException {
 +      return null;
 +    }
 +
 +    /* (non-Javadoc)
-      * @see JWTokenAuthority#verifyToken(JWTToken)
++     * @see org.apache.knox.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.knox.gateway.services.security.token.impl.JWT)
 +     */
 +    @Override
-     public boolean verifyToken(JWTToken token) throws TokenServiceException {
++    public boolean verifyToken(JWT token) throws TokenServiceException {
 +      JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) verifyingKey);
 +      return token.verify(verifier);
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String, long)
 +     */
 +    @Override
-     public JWTToken issueToken(Principal p, String audience, String algorithm,
++    public JWT issueToken(Principal p, String audience, String algorithm,
 +        long expires) throws TokenServiceException {
 +      return null;
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
++    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
 +        long expires) throws TokenServiceException {
 +      return null;
 +    }
 +
 +    /* (non-Javadoc)
 +     * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long)
 +     */
 +    @Override
-     public JWT issueToken(Principal p, String audience, long l)
++    public JWT issueToken(Principal p, String algorithm, long expires)
 +        throws TokenServiceException {
 +      return null;
 +    }
 +
 +    @Override
-     public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
++    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
 +      JWSVerifier verifier = new RSASSAVerifier(publicKey);
 +      return token.verify(verifier);
 +    }
 +
 +  }
 +
 +  protected static class TestFilterChain implements FilterChain {
 +    boolean doFilterCalled = false;
 +    Subject subject = null;
 +
 +    /* (non-Javadoc)
 +     * @see javax.servlet.FilterChain#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse)
 +     */
 +    @Override
 +    public void doFilter(ServletRequest request, ServletResponse response)
 +        throws IOException, ServletException {
 +      doFilterCalled = true;
 +
 +      subject = Subject.getSubject( AccessController.getContext() );
 +    }
 +
 +  }
 +}
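
For context, a concrete subclass of AbstractJWTFilterTest only has to supply the
transport-specific hooks. The following is a minimal illustrative sketch, not part
of the patch: the JWTFederationFilter instantiation, the "Authorization: Bearer"
header convention, and both init-param names are assumptions made for the example.

    import javax.servlet.http.HttpServletRequest;

    import org.apache.knox.gateway.provider.federation.jwt.filter.JWTFederationFilter;
    import org.easymock.EasyMock;
    import org.junit.Before;

    import com.nimbusds.jwt.SignedJWT;

    public class ExampleBearerTokenJWTFilterTest extends AbstractJWTFilterTest {

      @Before
      public void setup() {
        // Assumed concrete AbstractJWTFilter implementation under test
        handler = new JWTFederationFilter();
      }

      @Override
      protected void setTokenOnRequest(HttpServletRequest request, SignedJWT jwt) {
        // Present the serialized token the way this hypothetical filter expects it
        EasyMock.expect(request.getHeader("Authorization"))
                .andReturn("Bearer " + jwt.serialize());
      }

      @Override
      protected void setGarbledTokenOnRequest(HttpServletRequest request, SignedJWT jwt) {
        // Corrupt the token so that parsing must fail
        EasyMock.expect(request.getHeader("Authorization"))
                .andReturn("Bearer ggg" + jwt.serialize());
      }

      @Override
      protected String getAudienceProperty() {
        return "jwt.expected.audiences";   // assumed init-param name
      }

      @Override
      protected String getVerificationPemProperty() {
        return "jwt.verification.pem";     // assumed init-param name
      }
    }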

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
index 66828cd,0000000..cd2c0eb
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
@@@ -1,516 -1,0 +1,521 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import org.apache.commons.cli.ParseException;
 +import org.apache.knox.gateway.i18n.messages.Message;
 +import org.apache.knox.gateway.i18n.messages.MessageLevel;
 +import org.apache.knox.gateway.i18n.messages.Messages;
 +import org.apache.knox.gateway.i18n.messages.StackTrace;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +
 +import java.io.File;
 +import java.net.URI;
 +import java.util.Date;
 +import java.util.Map;
 +import java.util.Set;
 +
 +/**
 + *
 + */
 +@Messages(logger="org.apache.knox.gateway")
 +public interface GatewayMessages {
 +
 +  @Message( level = MessageLevel.FATAL, text = "Failed to parse command line: {0}" )
 +  void failedToParseCommandLine( @StackTrace( level = MessageLevel.DEBUG ) ParseException e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Starting gateway..." )
 +  void startingGateway();
 +
 +  @Message( level = MessageLevel.FATAL, text = "Failed to start gateway: {0}" )
 +  void failedToStartGateway( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Started gateway on port {0}." )
 +  void startedGateway( int port );
 +
 +  @Message( level = MessageLevel.INFO, text = "Stopping gateway..." )
 +  void stoppingGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Stopped gateway." )
 +  void stoppedGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading configuration resource {0}" )
 +  void loadingConfigurationResource( String res );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading configuration file {0}" )
 +  void loadingConfigurationFile( String file );
 +
 +  @Message( level = MessageLevel.WARN, text = "Failed to load configuration file {0}: {1}" )
 +  void failedToLoadConfig( String path, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Using {1} as GATEWAY_HOME via {0}." )
 +  void settingGatewayHomeDir( String location, String home );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading topologies from directory: {0}" )
 +  void loadingTopologiesFromDirectory( String topologiesDir );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Loading topology file: {0}" )
 +  void loadingTopologyFile( String fileName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Monitoring topologies in directory: {0}" )
 +  void monitoringTopologyChangesInDirectory( String topologiesDir );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deploying topology {0} to {1}" )
 +  void deployingTopology( String clusterName, String warDirName );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Deployed topology {0}." )
 +  void deployedTopology( String clusterName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loading topology {0} from {1}" )
 +  void redeployingTopology( String clusterName, String warDirName );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Redeployed topology {0}." )
 +  void redeployedTopology( String clusterName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Activating topology {0}" )
 +  void activatingTopology( String name );
 +
 +  @Message( level = MessageLevel.INFO, text = "Activating topology {0} archive {1}" )
 +  void activatingTopologyArchive( String topology, String archive );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deactivating topology {0}" )
 +  void deactivatingTopology( String name );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to deploy topology {0}: {1}" )
 +  void failedToDeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topology {0}" )
 +  void failedToRedeployTopology( String name );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topology {0}: {1}" )
 +  void failedToRedeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
 +
 +  @Message(level = MessageLevel.ERROR, text = "Failed to load topology {0}: Topology configuration is invalid!")
 +  void failedToLoadTopology(String fileName);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topologies: {0}" )
 +  void failedToRedeployTopologies( @StackTrace(level=MessageLevel.DEBUG) Throwable e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to undeploy topology {0}: {1}" )
 +  void failedToUndeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Exception e );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting topology {0}" )
 +  void deletingTopology( String topologyName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting deployed topology {0}" )
 +  void deletingDeployment( String warDirName );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Purge backups of deployed topology {0}" )
 +  void cleanupDeployments( String topologyName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Deleting backup deployed topology {0}" )
 +  void cleanupDeployment( String absolutePath );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating gateway home directory: {0}" )
 +  void creatingGatewayHomeDir( File homeDir );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating gateway deployment directory: {0}" )
 +  void creatingGatewayDeploymentDir( File topologiesDir );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating default gateway configuration file: {0}" )
 +  void creatingDefaultConfigFile( File defaultConfigFile );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating sample topology file: {0}" )
 +  void creatingDefaultTopologyFile( File defaultConfigFile );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null name: {0}" )
 +  void ignoringServiceContributorWithMissingName( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null role: {0}" )
 +  void ignoringServiceContributorWithMissingRole( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null version: {0}" )
 +  void ignoringServiceContributorWithMissingVersion( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null name: {0}" )
 +  void ignoringProviderContributorWithMissingName( String className );
 +
 +  @Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null role: {0}" )
 +  void ignoringProviderContributorWithMissingRole( String className );
 +
 +  @Message( level = MessageLevel.INFO, text = "Loaded logging configuration: {0}" )
 +  void loadedLoggingConfig( String fileName );
 +
 +  @Message( level = MessageLevel.WARN, text = "Failed to load logging configuration: {0}" )
 +  void failedToLoadLoggingConfig( String fileName );
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating credential store for the gateway instance." )
 +  void creatingCredentialStoreForGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Credential store for the gateway instance found - no need to create one." )
 +  void credentialStoreForGatewayFoundNotCreating();
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating keystore for the gateway instance." )
 +  void creatingKeyStoreForGateway();
 +
 +  @Message( level = MessageLevel.INFO, text = "Keystore for the gateway instance found - no need to create one." )
 +  void keyStoreForGatewayFoundNotCreating();
 +
 +  @Message( level = MessageLevel.INFO, text = "Creating credential store for the cluster: {0}" )
 +  void creatingCredentialStoreForCluster(String clusterName);
 +
 +  @Message( level = MessageLevel.INFO, text = "Credential store found for the cluster: {0} - no need to create one." )
 +  void credentialStoreForClusterFoundNotCreating(String clusterName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Received request: {0} {1}" )
 +  void receivedRequest( String method, String uri );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Dispatch request: {0} {1}" )
 +  void dispatchRequest( String method, URI uri );
 +  
 +  @Message( level = MessageLevel.WARN, text = "Connection exception dispatching request: {0} {1}" )
 +  void dispatchServiceConnectionException( URI uri, @StackTrace(level=MessageLevel.WARN) Exception e );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Signature verified: {0}" )
 +  void signatureVerified( boolean verified );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Apache Knox Gateway {0} ({1})" )
 +  void gatewayVersionMessage( String version, String hash );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to inject service {0}: {1}" )
 +  void failedToInjectService( String serviceName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to finalize contribution: {0}" )
 +  void failedToFinalizeContribution( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to contribute service [role={1}, name={0}]: {2}" )
 +  void failedToContributeService( String name, String role, @StackTrace( level = MessageLevel.ERROR ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to contribute provider [role={1}, name={0}]: {2}" )
 +  void failedToContributeProvider( String name, String role, @StackTrace( level = MessageLevel.ERROR ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to initialize contribution: {0}" )
 +  void failedToInitializeContribution( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to initialize servlet instance: {0}" )
 +  void failedToInitializeServletInstace( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Gateway processing failed: {0}" )
 +  void failedToExecuteFilter( @StackTrace( level = MessageLevel.INFO ) Throwable t );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to load topology {0}: {1}")
 +  void failedToLoadTopology( String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to load topology {0}, retrying after {1}ms: {2}")
 +  void failedToLoadTopologyRetrying( String friendlyURI, String delay, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to handle topology events: {0}" )
 +  void failedToHandleTopologyEvents( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to reload topologies: {0}" )
 +  void failedToReloadTopologies( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.FATAL, text = "Unsupported encoding: {0}" )
 +  void unsupportedEncoding( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to persist master secret: {0}" )
 +  void failedToPersistMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to encrypt master secret: {0}" )
 +  void failedToEncryptMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to initialize master service from persistent master {0}: {1}" )
 +  void failedToInitializeFromPersistentMaster( String masterFileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to encode passphrase: {0}" )
 +  void failedToEncodePassphrase( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to verify signature: {0}")
 +  void failedToVerifySignature( @StackTrace(level=MessageLevel.DEBUG) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to sign the data: {0}")
 +  void failedToSignData( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to decrypt password for cluster {0}: {1}" )
 +  void failedToDecryptPasswordForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to encrypt password for cluster {0}: {1}")
 +  void failedToEncryptPasswordForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed to create keystore [filename={0}, type={1}]: {2}" )
 +  void failedToCreateKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to load keystore [filename={0}, type={1}]: {2}" )
 +  void failedToLoadKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to add key for cluster {0}: {1}" )
 +  void failedToAddKeyForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to add credential for cluster {0}: {1}" )
 +  void failedToAddCredentialForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get key for Gateway {0}: {1}" )
 +  void failedToGetKeyForGateway( String alias, @StackTrace( level=MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get credential for cluster {0}: {1}" )
 +  void failedToGetCredentialForCluster( String clusterName, @StackTrace(level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get key for cluster {0}: {1}" )
 +  void failedToGetKeyForCluster( String clusterName, @StackTrace(level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to add self signed certificate for Gateway {0}: {1}" )
 +  void failedToAddSeflSignedCertForGateway( String alias, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to generate secret key from password: {0}" )
 +  void failedToGenerateKeyFromPassword( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to establish connection to {0}: {1}" )
 +  void failedToEstablishConnectionToUrl( String url, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to interpret property \"{0}\": {1}")
 +  void failedToInterpretProperty( String property, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to instantiate the internal gateway services." )
 +  void failedToInstantiateGatewayServices();
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to serialize map to Json string {0}: {1}" )
 +  void failedToSerializeMapToJSON( Map<String, Object> map, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get map from Json string {0}: {1}" )
 +  void failedToGetMapFromJsonString( String json, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.DEBUG, text = "Successful Knox->Hadoop SPNegotiation authentication for URL: {0}" )
 +  void successfulSPNegoAuthn(String uri);
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed Knox->Hadoop SPNegotiation authentication for URL: {0}" )
 +  void failedSPNegoAuthn(String uri);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}" )
 +  void dispatchResponseStatusCode(int statusCode);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}, Location: {1}" )
 +  void dispatchResponseCreatedStatusCode( int statusCode, String location );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to decrypt cipher text for cluster {0}: due to inability to retrieve the password." )
 +  void failedToDecryptCipherForClusterNullPassword(String clusterName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Gateway services have not been initialized." )
 +  void gatewayServicesNotInitialized();
 +
 +  @Message( level = MessageLevel.INFO, text = "The Gateway SSL certificate is issued to hostname: {0}." )
 +  void certificateHostNameForGateway(String cn);
 +
 +  @Message( level = MessageLevel.INFO, text = "The Gateway SSL certificate is valid between: {0} and {1}." )
 +  void certificateValidityPeriod(Date notBefore, Date notAfter);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Unable to retrieve certificate for Gateway: {0}." )
 +  void unableToRetrieveCertificateForGateway(Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to generate alias for cluster: {0} {1}." )
 +  void failedToGenerateAliasForCluster(String clusterName, KeystoreServiceException e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Key passphrase not found in credential store - using master secret." )
 +  void assumingKeyPassphraseIsMaster();
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to remove alias for cluster: {0} {1}." )
 +  void failedToRemoveCredentialForCluster(String clusterName, Exception e);
 +
 +  @Message( level = MessageLevel.WARN, text = "Failed to match path {0}" )
 +  void failedToMatchPath( String path );
 +  
 +  @Message( level = MessageLevel.ERROR, text = "Failed to get system ldap connection: {0}" )
 +  void failedToGetSystemLdapConnection( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +  
 +  @Message( level = MessageLevel.WARN, text = "Value not found for cluster:{0}, alias: {1}" )
 +  void aliasValueNotFound( String cluster, String alias );
 +
 +  @Message( level = MessageLevel.INFO, text = "Computed userDn: {0} using dnTemplate for principal: {1}" )
 +  void computedUserDn(String userDn, String principal);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Searching from {0} where {1} scope {2}" )
 +  void searchBaseFilterScope( String searchBase, String searchFilter, String searchScope );
 +
 +  @Message( level = MessageLevel.INFO, text = "Computed userDn: {0} using ldapSearch for principal: {1}" )
 +  void searchedAndFoundUserDn(String userDn, String principal);
 +
 +  @Message( level = MessageLevel.INFO, text = "Computed roles/groups: {0} for principal: {1}" )
 +  void lookedUpUserRoles(Set<String> roleNames, String userName);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Initialize provider: {1}/{0}" )
 +  void initializeProvider( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Initialize service: {1}/{0}" )
 +  void initializeService( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Contribute provider: {1}/{0}" )
 +  void contributeProvider( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Contribute service: {1}/{0}" )
 +  void contributeService( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Finalize provider: {1}/{0}" )
 +  void finalizeProvider( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Finalize service: {1}/{0}" )
 +  void finalizeService( String name, String role );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Configured services directory is {0}" )
 +  void usingServicesDirectory(String path);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall service definition file {0} file : {1}" )
 +  void failedToLoadServiceDefinition(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to find service definition file {0} file : {1}" )
 +  void failedToFindServiceDefinitionFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to find rewrite file {0} file : {1}" )
 +  void failedToFindRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall rewrite file {0} file : {1}" )
 +  void failedToLoadRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "No rewrite file found in service directory {0}" )
 +  void noRewriteFileFound(String path);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Added Service definition name: {0}, role : {1}, version : {2}" )
 +  void addedServiceDefinition(String serviceName, String serviceRole, String version);
 +
 +  @Message( level = MessageLevel.INFO, text = "System Property: {0}={1}" )
 +  void logSysProp( String name, String property );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Unable to get password: {0}" )
 +  void unableToGetPassword(@StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Initialize application: {0}" )
 +  void initializeApplication( String name );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Contribute application: {0}" )
 +  void contributeApplication( String name );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Finalize application: {0}" )
 +  void finalizeApplication( String name );
 +
 +  @Message( level = MessageLevel.INFO, text = "Default topology {0} at {1}" )
 +  void defaultTopologySetup( String defaultTopologyName, String redirectContext );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Default topology forward from {0} to {1}" )
 +  void defaultTopologyForward( String oldTarget, String newTarget );
 +
 +  @Message( level = MessageLevel.ERROR, text = "Unable to setup PagedResults" )
 +  void unableToSetupPagedResults();
 +
 +  @Message( level = MessageLevel.INFO, text = "Ignoring PartialResultException" )
 +  void ignoringPartialResultException();
 +
 +  @Message( level = MessageLevel.WARN, text = "Only retrieved first {0} groups due to SizeLimitExceededException." )
 +  void sizeLimitExceededOnlyRetrieved(int numResults);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to parse path into Template: {0} : {1}" )
 +  void failedToParsePath( String path, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to initialize metrics reporter {0}  : {1}" )
 +  void failedToInitializeReporter( String name,  @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to start metrics reporter {0}  : {1}" )
 +  void failedToStartReporter( String name,  @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.DEBUG, text = "Failed to stop metrics reporter {0}  : {1}" )
 +  void failedToStopReporter( String name,  @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 +
 +  @Message( level = MessageLevel.INFO, text = "Cookie scoping feature enabled: {0}" )
 +  void cookieScopingFeatureEnabled( boolean enabled );
 +
 +  /**
 +   * Log whether Topology port mapping feature is enabled/disabled.
 +   *
 +   * @param enabled
 +   */
 +  @Message(level = MessageLevel.INFO,
 +           text = "Topology port mapping feature enabled: {0}")
 +  void gatewayTopologyPortMappingEnabled(final boolean enabled);
 +
 +  /**
 +   * @param topology
 +   * @param port
 +   */
 +  @Message(level = MessageLevel.DEBUG,
 +           text = "Creating a connector for topology {0} listening on port {1}.")
 +  void createJettyConnector(final String topology, final int port);
 +
 +  /**
 +   * @param topology
 +   */
 +  @Message(level = MessageLevel.DEBUG,
 +           text = "Creating a handler for topology {0}.")
 +  void createJettyHandler(final String topology);
 +
 +  /**
 +   * @param oldTarget
 +   * @param newTarget
 +   */
 +  @Message(level = MessageLevel.INFO,
 +           text = "Updating request context from {0} to {1}")
 +  void topologyPortMappingAddContext(final String oldTarget,
 +      final String newTarget);
 +
 +  /**
 +   * @param oldTarget
 +   * @param newTarget
 +   */
 +  @Message(level = MessageLevel.DEBUG,
 +           text = "Updating request target from {0} to {1}")
 +  void topologyPortMappingUpdateRequest(final String oldTarget,
 +      final String newTarget);
 +
 +  /**
 +   * Messages for Topology Port Mapping
 +   *
 +   * @param port
 +   * @param topology
 +   */
 +  @Message(level = MessageLevel.ERROR,
 +           text = "Port {0} configured for Topology - {1} is already in use.")
 +  void portAlreadyInUse(final int port, final String topology);
 +
 +  /**
 +   * Messages for Topology Port Mapping
 +   *
 +   * @param port
 +   */
 +  @Message(level = MessageLevel.ERROR,
 +           text = "Port {0} is already in use.")
 +  void portAlreadyInUse(final int port);
 +
 +  /**
 +   * Log topology and port
 +   *
 +   * @param topology
 +   * @param port
 +   */
 +  @Message(level = MessageLevel.INFO,
 +           text = "Started gateway, topology \"{0}\" listening on port \"{1}\".")
 +  void startedGateway(final String topology, final int port);
 +
 +  @Message(level = MessageLevel.ERROR,
 +           text =
 +               " Could not find topology \"{0}\" mapped to port \"{1}\" configured in gateway-config.xml. "
 +                   + "This invalid topology mapping will be ignored by the gateway. "
 +                   + "Gateway restart will be required if in the future \"{0}\" topology is added.")
-   void topologyPortMappingCannotFindTopology(final String topology,
-       final int port);
++  void topologyPortMappingCannotFindTopology(final String topology, final int port);
++
++
++  @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
++  void simpleDescriptorHandlingError(final String simpleDesc,
++                                     @StackTrace( level = MessageLevel.DEBUG ) Exception e );
++
 +}
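
As a usage note, the interface above is never implemented by hand; the gateway's
i18n framework generates the logger from the annotations, mirroring the pattern
SimpleDescriptorHandler uses below with MessagesFactory. A minimal sketch, with
illustrative argument values:

    // Typically held in a static field of the class doing the logging:
    private static final GatewayMessages LOG = MessagesFactory.get(GatewayMessages.class);

    // Each call logs at the level declared in its @Message annotation,
    // substituting the arguments into the {0}/{1} placeholders:
    LOG.deployingTopology("sandbox", "/var/lib/knox/data/deployments/sandbox.war");
    LOG.startedGateway(8443);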


[07/22] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
new file mode 100644
index 0000000..fb563fa
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.Service;
+import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
+ * gateway.
+ */
+public class SimpleDescriptorHandler {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+    private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
+
+    public static Map<String, File> handle(File desc) throws IOException {
+        return handle(desc, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, Service... gatewayServices) throws IOException {
+        return handle(desc, desc.getParentFile(), gatewayServices);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
+        return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory, Service... gatewayServices) throws IOException {
+        return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
+        return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service... gatewayServices) {
+        Map<String, File> result = new HashMap<>();
+
+        File topologyDescriptor = null;
+
+        DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
+        sdc.setUser(desc.getDiscoveryUser());
+        sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices);
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
+
+        Map<String, List<String>> serviceURLs = new HashMap<>();
+
+        if (cluster != null) {
+            for (SimpleDescriptor.Service descService : desc.getServices()) {
+                String serviceName = descService.getName();
+
+                List<String> descServiceURLs = descService.getURLs();
+                if (descServiceURLs == null || descServiceURLs.isEmpty()) {
+                    descServiceURLs = cluster.getServiceURLs(serviceName);
+                }
+
+                // If there is at least one URL associated with the service, then add it to the map
+                if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
+                    serviceURLs.put(serviceName, descServiceURLs);
+                } else {
+                    log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
+                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
+                                                    ". Topology update aborted!");
+                }
+            }
+        } else {
+            log.failedToDiscoverClusterServices(desc.getClusterName());
+        }
+
+        File providerConfig = null;
+        try {
+            // Verify that the referenced provider configuration exists before attempting to read it
+            providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
+            if (providerConfig == null) {
+                log.failedToResolveProviderConfigRef(desc.getProviderConfig());
+                throw new IllegalArgumentException("Unresolved provider configuration reference: " +
+                                                   desc.getProviderConfig() + " ; Topology update aborted!");
+            }
+            result.put("reference", providerConfig);
+
+            // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
+
+            String topologyFilename = desc.getName();
+            if (topologyFilename == null) {
+                topologyFilename = desc.getClusterName();
+            }
+            topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
+            // Use try-with-resources so the writer and reader are closed even on failure
+            try (FileWriter fw = new FileWriter(topologyDescriptor);
+                 InputStreamReader policyReader =
+                         new InputStreamReader(new FileInputStream(providerConfig))) {
+                fw.write("<topology>\n");
+
+                // Copy the externalized provider configuration content into the topology descriptor in-line
+                char[] buffer = new char[1024];
+                int count;
+                while ((count = policyReader.read(buffer)) > 0) {
+                    fw.write(buffer, 0, count);
+                }
+
+                // Write the service declarations
+                for (Map.Entry<String, List<String>> service : serviceURLs.entrySet()) {
+                    fw.write("    <service>\n");
+                    fw.write("        <role>" + service.getKey() + "</role>\n");
+                    for (String url : service.getValue()) {
+                        fw.write("        <url>" + url + "</url>\n");
+                    }
+                    fw.write("    </service>\n");
+                }
+
+                fw.write("</topology>\n");
+
+                fw.flush();
+            }
+        } catch (IOException e) {
+            log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
+            topologyDescriptor.delete();
+        }
+
+        result.put("topology", topologyDescriptor);
+        return result;
+    }
+
+
+    private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
+        File providerConfig;
+
+        // If the reference includes a path
+        if (reference.contains(File.separator)) {
+            // Check if it's an absolute path
+            providerConfig = new File(reference);
+            if (!providerConfig.exists()) {
+                // If it's not an absolute path, try treating it as a relative path
+                providerConfig = new File(srcDirectory, reference);
+                if (!providerConfig.exists()) {
+                    providerConfig = null;
+                }
+            }
+        } else { // No file path, just a name
+            // Check if it's co-located with the referencing descriptor
+            providerConfig = new File(srcDirectory, reference);
+            if (!providerConfig.exists()) {
+                // Check the shared-providers config location
+                File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
+                if (sharedProvidersDir.exists()) {
+                    providerConfig = new File(sharedProvidersDir, reference);
+                    if (!providerConfig.exists()) {
+                        // Check if it's a valid name without the extension
+                        providerConfig = new File(sharedProvidersDir, reference + ".xml");
+                        if (!providerConfig.exists()) {
+                            providerConfig = null;
+                        }
+                    }
+                }
+            }
+        }
+
+        return providerConfig;
+    }
+
+}
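
For reference, the handle method above generates a topology file of the following shape: the referenced provider configuration is copied in verbatim after the opening element, followed by one <service> entry per resolved service. A minimal sketch of the output (the WEBHDFS URL is a placeholder):

    <topology>
        <gateway>
            <!-- provider declarations copied verbatim from the referenced provider configuration -->
        </gateway>
        <service>
            <role>WEBHDFS</role>
            <url>http://host.example.com:50070/webhdfs</url>
        </service>
    </topology>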

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
new file mode 100644
index 0000000..32ceba9
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+class SimpleDescriptorImpl implements SimpleDescriptor {
+
+    @JsonProperty("discovery-type")
+    private String discoveryType;
+
+    @JsonProperty("discovery-address")
+    private String discoveryAddress;
+
+    @JsonProperty("discovery-user")
+    private String discoveryUser;
+
+    @JsonProperty("discovery-pwd-alias")
+    private String discoveryPasswordAlias;
+
+    @JsonProperty("provider-config-ref")
+    private String providerConfig;
+
+    @JsonProperty("cluster")
+    private String cluster;
+
+    @JsonProperty("services")
+    private List<ServiceImpl> services;
+
+    private String name = null;
+
+    void setName(String name) {
+        this.name = name;
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    @Override
+    public String getDiscoveryType() {
+        return discoveryType;
+    }
+
+    @Override
+    public String getDiscoveryAddress() {
+        return discoveryAddress;
+    }
+
+    @Override
+    public String getDiscoveryUser() {
+        return discoveryUser;
+    }
+
+    @Override
+    public String getDiscoveryPasswordAlias() {
+        return discoveryPasswordAlias;
+    }
+
+    @Override
+    public String getClusterName() {
+        return cluster;
+    }
+
+    @Override
+    public String getProviderConfig() {
+        return providerConfig;
+    }
+
+    @Override
+    public List<Service> getServices() {
+        List<Service> result = new ArrayList<>();
+        if (services != null) { // guard against descriptors that declare no services
+            result.addAll(services);
+        }
+        return result;
+    }
+
+    public static class ServiceImpl implements Service {
+        private String name;
+        private List<String> urls;
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getURLs() {
+            return urls;
+        }
+    }
+
+}
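
Given the Jackson bindings above, a JSON descriptor of the following shape populates this class (an illustrative sketch; the password alias and URLs are placeholder values consistent with the tests in this change):

    {
      "discovery-type": "AMBARI",
      "discovery-address": "http://c6401.ambari.apache.org:8080",
      "discovery-user": "admin",
      "discovery-pwd-alias": "ambari.discovery.password",
      "provider-config-ref": "ambari-cluster-policy.xml",
      "cluster": "myCluster",
      "services": [
        { "name": "NAMENODE" },
        { "name": "AMBARIUI", "urls": [ "http://c6401.ambari.apache.org:8080" ] }
      ]
    }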

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
new file mode 100644
index 0000000..cf9aa28
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.simple")
+public interface SimpleDescriptorMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Service discovery for cluster {0} failed.")
+    void failedToDiscoverClusterServices(final String cluster);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No URLs were discovered for {0} in the {1} cluster.")
+    void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Failed to resolve the referenced provider configuration {0}.")
+    void failedToResolveProviderConfigRef(final String providerConfigRef);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Error generating topology {0} from simple descriptor: {1}")
+    void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
+                                                      @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
index 55cd5cc..498d750 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -22,8 +22,12 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.monitor.FileAlterationMonitor;
 import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
 import org.apache.hadoop.gateway.topology.*;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
 import org.apache.hadoop.test.TestUtils;
 import org.easymock.EasyMock;
 import org.junit.After;
@@ -36,6 +40,8 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.*;
 
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.isA;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.core.IsNull.notNullValue;
@@ -78,9 +84,17 @@ public class DefaultTopologyServiceTest {
   public void testGetTopologies() throws Exception {
 
     File dir = createDir();
-    long time = dir.lastModified();
+    File topologyDir = new File(dir, "topologies");
+
+    File descriptorsDir = new File(dir, "descriptors");
+    descriptorsDir.mkdirs();
+
+    File sharedProvidersDir = new File(dir, "shared-providers");
+    sharedProvidersDir.mkdirs();
+
+    long time = topologyDir.lastModified();
     try {
-      createFile(dir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
+      createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
 
       TestTopologyListener topoListener = new TestTopologyListener();
       FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
@@ -89,17 +103,16 @@ public class DefaultTopologyServiceTest {
       Map<String, String> c = new HashMap<>();
 
       GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(dir.toString()).anyTimes();
+      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
       EasyMock.replay(config);
 
       provider.init(config, c);
 
-
       provider.addTopologyChangeListener(topoListener);
 
       provider.reloadTopologies();
 
-
       Collection<Topology> topologies = provider.getTopologies();
       assertThat(topologies, notNullValue());
       assertThat(topologies.size(), is(1));
@@ -110,7 +123,7 @@ public class DefaultTopologyServiceTest {
       topoListener.events.clear();
 
       // Add a file to the directory.
-      File two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L);
+      File two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
       assertThat(topologies.size(), is(2));
@@ -131,7 +144,7 @@ public class DefaultTopologyServiceTest {
       assertThat(event.getTopology(), notNullValue());
 
       // Update a file in the directory.
-      two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L);
+      two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
       assertThat(topologies.size(), is(2));
@@ -153,6 +166,49 @@ public class DefaultTopologyServiceTest {
       topology = topologies.iterator().next();
       assertThat(topology.getName(), is("one"));
       assertThat(topology.getTimestamp(), is(time));
+
+      // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
+      // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
+      //         org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+      AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+      EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
+      EasyMock.replay(aliasService);
+      DefaultTopologyService.DescriptorsMonitor dm =
+                                          new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
+
+      // Write out the referenced provider config first
+      File provCfgFile = createFile(sharedProvidersDir,
+                                    "ambari-cluster-policy.xml",
+                                    "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
+                                    1L);
+      try {
+        // Create the simple descriptor in the descriptors dir
+        File simpleDesc =
+                createFile(descriptorsDir,
+                           "four.json",
+                           "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
+                           1L);
+
+        // Trigger the topology generation by noticing the simple descriptor
+        dm.onFileChange(simpleDesc);
+
+        // Load the generated topology
+        provider.reloadTopologies();
+        topologies = provider.getTopologies();
+        assertThat(topologies.size(), is(2));
+        names = new HashSet<>(Arrays.asList("one", "four"));
+        iterator = topologies.iterator();
+        topology = iterator.next();
+        assertThat(names, hasItem(topology.getName()));
+        names.remove(topology.getName());
+        topology = iterator.next();
+        assertThat(names, hasItem(topology.getName()));
+        names.remove(topology.getName());
+        assertThat(names.size(), is(0));
+      } finally {
+        provCfgFile.delete();
+      }
     } finally {
       FileUtils.deleteQuietly(dir);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
new file mode 100644
index 0000000..269bed2
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.junit.Assert.*;
+
+
+public class PropertiesFileServiceDiscoveryTest {
+
+    private static final Map<String, String> clusterProperties = new HashMap<>();
+    static {
+        clusterProperties.put("mycluster.name", "mycluster");
+        clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020");
+        clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050");
+        clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton");
+        clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie");
+        clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath");
+        clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws");
+    }
+
+    private static final Properties config = new Properties();
+    static {
+        for (String name : clusterProperties.keySet()) {
+            config.setProperty(name, clusterProperties.get(name));
+        }
+    }
+
+
+    @Test
+    public void testPropertiesFileServiceDiscovery() throws Exception {
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE");
+        assertNotNull(sd);
+
+        String discoveryAddress = this.getClass().getName() + "__test-discovery-source.properties";
+        File discoverySource = new File(discoveryAddress);
+        try {
+            try (FileOutputStream out = new FileOutputStream(discoverySource)) {
+                config.store(out, "Test discovery source for PropertiesFileServiceDiscovery");
+            }
+
+            ServiceDiscovery.Cluster c =
+                        sd.discover(new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster");
+            assertNotNull(c);
+            for (String name : clusterProperties.keySet()) {
+                assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0));
+            }
+        } finally {
+            discoverySource.delete();
+        }
+    }
+
+
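+    // Debugging helper retained for manual inspection of discovered URLs; not exercised by the test assertions above.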
+    private void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            String value = "";
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    value += url + " ";
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, value));
+        }
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
new file mode 100644
index 0000000..d592ede
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+
+import static org.junit.Assert.*;
+
+
+public class ServiceDiscoveryFactoryTest {
+
+    @Test
+    public void testGetDummyImpl() throws Exception {
+        String TYPE = "DUMMY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+    }
+
+
+    @Test
+    public void testGetDummyImplWithMismatchedCase() throws Exception {
+        String TYPE = "dUmmY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE.toUpperCase(), sd.getType());
+    }
+
+
+    @Test
+    public void testGetInvalidImpl() throws Exception {
+        String TYPE = "InValID";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetImplWithMismatchedType() throws Exception {
+        String TYPE = "DeclaredType";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetPropertiesFileImplWithAliasServiceInjection() throws Exception {
+        String TYPE = "PROPERTIES_FILE";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE, new DefaultAliasService());
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+
+        // Verify that the AliasService was injected as expected
+        Field aliasServiceField = sd.getClass().getDeclaredField("aliasService");
+        aliasServiceField.setAccessible(true);
+        Object fieldValue = aliasServiceField.get(sd);
+        assertNotNull(fieldValue);
+        assertTrue(AliasService.class.isAssignableFrom(fieldValue.getClass()));
+    }
+
+
+}
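
The reflection-based assertion above peeks at the injected "aliasService" field directly. As a rough sketch of how such injection could work on the factory side, assuming @GatewayService is retained at runtime and injection is plain field reflection (this diff does not show the factory's actual implementation):

    import java.lang.reflect.Field;

    static void injectGatewayServices(ServiceDiscovery sd, Object... services) throws IllegalAccessException {
        for (Field field : sd.getClass().getDeclaredFields()) {
            if (field.isAnnotationPresent(GatewayService.class)) { // e.g., the aliasService field the test inspects
                for (Object service : services) {
                    if (field.getType().isAssignableFrom(service.getClass())) {
                        field.setAccessible(true);
                        field.set(sd, service);
                    }
                }
            }
        }
    }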

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
new file mode 100644
index 0000000..4a5323e
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This implementation is intended to be used by tests for which the actual service URLs are of no importance, such that
+ * tests can be written without having a valid service registry (e.g., Ambari) available.
+ */
+public class DummyServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "DUMMY";
+
+    private static final Cluster DUMMY = new Cluster() {
+        @Override
+        public String getName() {
+            return "dummy";
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return Collections.singletonList("http://servicehost:9999/dummy");
+        }
+    };
+
+    private static final Map<String, Cluster> CLUSTERS = new HashMap<>();
+    static {
+        CLUSTERS.put(DUMMY.getName(), DUMMY);
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return CLUSTERS;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return DUMMY;
+    }
+}
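
The tests in this change note that this extension must be "configured". Assuming ServiceDiscoveryFactory locates ServiceDiscoveryType implementations through java.util.ServiceLoader (an assumption; the factory itself is not part of this diff), registering the dummy type would amount to a provider-configuration resource along these lines:

    # src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
    org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType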

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
new file mode 100644
index 0000000..d47c38d
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class DummyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return DummyServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new DummyServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
new file mode 100644
index 0000000..a7fc34a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.*;
+
+class PropertiesFileServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "PROPERTIES_FILE";
+
+    @GatewayService
+    AliasService aliasService;
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, ServiceDiscovery.Cluster> discover(ServiceDiscoveryConfig config) {
+
+        Map<String, ServiceDiscovery.Cluster> result = new HashMap<>();
+
+        Properties p = new Properties();
+        try {
+            p.load(new FileInputStream(config.getAddress()));
+
+            Map<String, Map<String, List<String>>> clusters = new HashMap<>();
+            for (Object key : p.keySet()) {
+                String propertyKey = (String)key;
+                String[] parts = propertyKey.split("\\.");
+                if (parts.length == 2) {
+                    String clusterName = parts[0];
+                    String serviceName = parts[1];
+                    String serviceURL  = p.getProperty(propertyKey);
+                    if (!clusters.containsKey(clusterName)) {
+                        clusters.put(clusterName, new HashMap<String, List<String>>());
+                    }
+                    Map<String, List<String>> serviceURLs = clusters.get(clusterName);
+                    if (!serviceURLs.containsKey(serviceName)) {
+                        serviceURLs.put(serviceName, new ArrayList<String>());
+                    }
+                    serviceURLs.get(serviceName).add(serviceURL);
+                }
+            }
+
+            for (String clusterName : clusters.keySet()) {
+                result.put(clusterName,
+                        new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName)));
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        return result;
+    }
+
+
+    @Override
+    public ServiceDiscovery.Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        Map<String, ServiceDiscovery.Cluster> clusters = discover(config);
+        return clusters.get(clusterName);
+    }
+
+
+    static class Cluster implements ServiceDiscovery.Cluster {
+        private String name;
+        private Map<String, List<String>> serviceURLS = new HashMap<>();
+
+        Cluster(String name, Map<String, List<String>> serviceURLs) {
+            this.name = name;
+            this.serviceURLS.putAll(serviceURLs);
+        }
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return serviceURLS.get(serviceName);
+        }
+    }
+
+}
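
The discover(ServiceDiscoveryConfig) implementation above splits each property key on "." and treats the parts as <cluster>.<SERVICE>, so a source file like the following (mirroring the fixture in PropertiesFileServiceDiscoveryTest) yields one cluster named mycluster with a URL list per service:

    mycluster.NAMENODE=hdfs://namenodehost:8020
    mycluster.WEBHCAT=http://webhcathost:50111/templeton
    mycluster.OOZIE=http://ooziehost:11000/oozie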

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
new file mode 100644
index 0000000..2cfd998
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class PropertiesFileServiceDiscoveryType implements ServiceDiscoveryType {
+
+    @Override
+    public String getType() {
+        return PropertiesFileServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new PropertiesFileServiceDiscovery();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
new file mode 100644
index 0000000..8f7b71a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Map;
+
+public class SneakyServiceDiscoveryImpl implements ServiceDiscovery {
+    @Override
+    public String getType() {
+        return "ActualType";
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return null;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return null;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
new file mode 100644
index 0000000..97665dc
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class SneakyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return "DeclaredType";
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new SneakyServiceDiscoveryImpl();
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
new file mode 100644
index 0000000..3dac66a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
+import java.util.*;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorFactoryTest {
+
+
+    @Test
+    public void testParseJSONSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "admin";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.json";
+        File testJSON = null;
+        try {
+            testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } finally {
+            // Let parse/validation failures propagate so they fail the test rather than
+            // being swallowed; only the temp file cleanup belongs here.
+            if (testJSON != null) {
+                testJSON.delete();
+            }
+        }
+    }
+
+    @Test
+    public void testParseYAMLSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "joeblow";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.yml";
+        File testYAML = null;
+        try {
+            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } finally {
+            // Let parse/validation failures propagate so they fail the test rather than
+            // being swallowed; only the temp file cleanup belongs here.
+            if (testYAML != null) {
+                testYAML.delete();
+            }
+        }
+    }
+
+
+    private void validateSimpleDescriptor(SimpleDescriptor    sd,
+                                          String              discoveryType,
+                                          String              discoveryAddress,
+                                          String              providerConfig,
+                                          String              clusterName,
+                                          Map<String, List<String>> expectedServices) {
+        assertNotNull(sd);
+        assertEquals(discoveryType, sd.getDiscoveryType());
+        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
+        assertEquals(providerConfig, sd.getProviderConfig());
+        assertEquals(clusterName, sd.getClusterName());
+
+        List<SimpleDescriptor.Service> actualServices = sd.getServices();
+
+        assertEquals(expectedServices.size(), actualServices.size());
+
+        for (SimpleDescriptor.Service actualService : actualServices) {
+            assertTrue(expectedServices.containsKey(actualService.getName()));
+            assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
+        }
+    }
+
+
+    private File writeJSON(String path, String content) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+    private File writeJSON(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("{" + "\n");
+        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
+        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
+        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
+        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
+        fw.write("\"cluster\":\"" + clusterName + "\",\n");
+        fw.write("\"services\":[\n");
+
+        int i = 0;
+        for (String name : services.keySet()) {
+            fw.write("{\"name\":\"" + name + "\"");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write(", \"urls\":[");
+                Iterator<String> urlIter = urls.iterator();
+                while (urlIter.hasNext()) {
+                    fw.write("\"" + urlIter.next() + "\"");
+                    if (urlIter.hasNext()) {
+                        fw.write(", ");
+                    }
+                }
+                fw.write("]");
+            }
+            fw.write("}");
+            if (i++ < services.size() - 1) {
+                fw.write(",");
+            }
+            fw.write("\n");
+        }
+        fw.write("]\n");
+        fw.write("}\n");
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+    private File writeYAML(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("---" + "\n");
+        fw.write("discovery-type: " + discoveryType + "\n");
+        fw.write("discovery-address: " + discoveryAddress + "\n");
+        fw.write("discovery-user: " + discoveryUser + "\n");
+        fw.write("provider-config-ref: " + providerConfig + "\n");
+        fw.write("cluster: " + clusterName+ "\n");
+        fw.write("services:\n");
+        for (String name : services.keySet()) {
+            fw.write("    - name: " + name + "\n");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write("      urls:\n");
+                for (String url : urls) {
+                    fw.write("          - " + url + "\n");
+                }
+            }
+        }
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+}
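
For reference, writeYAML above emits a descriptor of the following shape (values are the ones used in testParseYAMLSimpleDescriptor; only two services shown):

    ---
    discovery-type: AMBARI
    discovery-address: http://c6401.ambari.apache.org:8080
    discovery-user: joeblow
    provider-config-ref: ambari-cluster-policy.xml
    cluster: myCluster
    services:
        - name: RESOURCEMANAGER
        - name: HIVE
          urls:
              - http://c6401.ambari.apache.org
              - http://c6402.ambari.apache.org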

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
new file mode 100644
index 0000000..90c7146
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
+import org.apache.hadoop.gateway.util.XmlUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+import java.io.*;
+import java.util.*;
+
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorHandlerTest {
+
+    private static final String TEST_PROVIDER_CONFIG =
+            "    <gateway>\n" +
+                    "        <provider>\n" +
+                    "            <role>authentication</role>\n" +
+                    "            <name>ShiroProvider</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param>\n" +
+                    "                <!-- \n" +
+                    "                session timeout in minutes,  this is really idle timeout,\n" +
+                    "                defaults to 30mins, if the property value is not defined,, \n" +
+                    "                current client authentication would expire if client idles contiuosly for more than this value\n" +
+                    "                -->\n" +
+                    "                <name>sessionTimeout</name>\n" +
+                    "                <value>30</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm</name>\n" +
+                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapContextFactory</name>\n" +
+                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory</name>\n" +
+                    "                <value>$ldapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+                    "                <value>ldap://localhost:33389</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+                    "                <value>simple</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>urls./**</name>\n" +
+                    "                <value>authcBasic</value>\n" +
+                    "            </param>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <provider>\n" +
+                    "            <role>identity-assertion</role>\n" +
+                    "            <name>Default</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <!--\n" +
+                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+                    "        For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
+                    "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
+                    "        in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
+                    "\n" +
+                    "        If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
+                    "        enabled parameter as false.\n" +
+                    "\n" +
+                    "        The name parameter specifies the external host names in a comma separated list.\n" +
+                    "        The value parameter specifies corresponding internal host names in a comma separated list.\n" +
+                    "\n" +
+                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
+                    "        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
+                    "        -->\n" +
+                    "        <provider>\n" +
+                    "            <role>hostmap</role>\n" +
+                    "            <name>static</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+                    "        </provider>\n" +
+                    "    </gateway>\n";
+
+
+    /**
+     * KNOX-1006
+     *
+     * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+     *             org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+     */
+    @Test
+    public void testSimpleDescriptorHandler() throws Exception {
+
+        final String type = "DUMMY";
+        final String address = "http://c6401.ambari.apache.org:8080";
+        final String clusterName = "dummy";
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("JOBTRACKER", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("WEBHCAT", null);
+        serviceURLs.put("OOZIE", null);
+        serviceURLs.put("WEBHBASE", null);
+        serviceURLs.put("HIVE", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        // Write the externalized provider config to a temp file
+        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = (new File(".")).getCanonicalFile();
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                           SimpleDescriptorHandler.handle(testDescriptor,
+                                                          providerConfig.getParentFile(), // simple desc co-located with provider config
+                                                          destDir);
+            topologyFile = files.get("topology");
+
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // Validate the provider configuration
+            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            assertTrue("Resulting provider config should be identical to the referenced content.",
+                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+            // Validate the service declarations
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                        (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+                    assertNotNull("Every declared service should have a URL.", url);
+                    if (!topologyServiceURLs.containsKey(role)) {
+                        topologyServiceURLs.put(role, new ArrayList<String>());
+                    }
+                    topologyServiceURLs.get(role).add(url);
+                }
+            }
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            providerConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
+    private File writeProviderConfig(String path, String content) throws IOException {
+        File f = new File(path);
+
+        // Use try-with-resources so the writer is closed even if the write fails.
+        try (Writer fw = new FileWriter(f)) {
+            fw.write(content);
+        }
+
+        return f;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..82a6f86
--- /dev/null
+++ b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,21 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType
+org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType
+org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
new file mode 100644
index 0000000..8223bea
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
@@ -0,0 +1,74 @@
+<gateway>
+    <provider>
+        <role>authentication</role>
+        <name>ShiroProvider</name>
+        <enabled>true</enabled>
+        <param>
+            <!--
+            Session timeout in minutes; this is really an idle timeout.
+            It defaults to 30 minutes if the property value is not defined.
+            The current client authentication will expire if the client idles continuously for longer than this value.
+            -->
+            <name>sessionTimeout</name>
+            <value>30</value>
+        </param>
+        <param>
+            <name>main.ldapRealm</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+        </param>
+        <param>
+            <name>main.ldapContextFactory</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory</name>
+            <value>$ldapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.userDnTemplate</name>
+            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.url</name>
+            <value>ldap://localhost:33389</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+            <value>simple</value>
+        </param>
+        <param>
+            <name>urls./**</name>
+            <value>authcBasic</value>
+        </param>
+    </provider>
+
+    <provider>
+        <role>identity-assertion</role>
+        <name>Default</name>
+        <enabled>true</enabled>
+    </provider>
+
+    <!--
+    Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+    For example, a hadoop service running in AWS may return a response that includes URLs containing
+    an AWS-internal host name.  If the client needs to make a subsequent request to the host identified
+    in those URLs, the host names need to be mapped to external ones that the client can use to connect.
+
+    If the external and internal host names are the same, turn off this provider by setting the value of
+    the enabled parameter to false.
+
+    The name parameter specifies the external host names in a comma separated list.
+    The value parameter specifies corresponding internal host names in a comma separated list.
+
+    Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+    out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
+    Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
+    -->
+    <provider>
+        <role>hostmap</role>
+        <name>static</name>
+        <enabled>true</enabled>
+        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+    </provider>
+
+</gateway>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
new file mode 100644
index 0000000..45407a7
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
@@ -0,0 +1,18 @@
+{
+  "discovery-type":"DUMMY",
+  "discovery-address":"http://c6401.ambari.apache.org:8080",
+  "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
+  "cluster":"dummy",
+  "services":[
+    {"name":"NAMENODE"},
+    {"name":"JOBTRACKER"},
+    {"name":"WEBHDFS"},
+    {"name":"WEBHCAT"},
+    {"name":"OOZIE"},
+    {"name":"WEBHBASE"},
+    {"name":"HIVE"},
+    {"name":"RESOURCEMANAGER"},
+    {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
+    {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
new file mode 100644
index 0000000..554ddbe
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import java.lang.annotation.*;
+
+/**
+ * This annotation can be used to inject gateway services into a ServiceDiscovery implementation.
+ */
+@Documented
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface GatewayService {
+
+}
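
For illustration, a minimal usage sketch (the surrounding class here is hypothetical;
AliasService is assumed as one plausible injectable gateway service):

    import org.apache.hadoop.gateway.services.security.AliasService;
    import org.apache.hadoop.gateway.topology.discovery.GatewayService;

    public class ExampleServiceDiscovery /* implements ServiceDiscovery */ {

        // The gateway's service framework injects an AliasService instance into
        // this annotated field, e.g. for resolving discovery password aliases.
        @GatewayService
        private AliasService aliasService;
    }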

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
new file mode 100644
index 0000000..eefa30b
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * Implementations provide the means by which Hadoop service endpoint URLs are discovered from a source with knowledge
+ * about the service topology of one or more clusters.
+ */
+public interface ServiceDiscovery {
+
+    /**
+     * This is the type specified in a simple descriptor to indicate which ServiceDiscovery implementation to employ.
+     *
+     * @return The identifier for the service discovery type.
+     */
+    String getType();
+
+
+    /**
+     * Discover details of all the clusters known to the target registry.
+     *
+     * @param config The configuration for the discovery invocation
+     *
+     * @return A Map of the discovered service data, keyed by the cluster name.
+     */
+    Map<String, Cluster> discover(ServiceDiscoveryConfig config);
+
+
+    /**
+     * Discover details for a single cluster.
+     *
+     * @param config The configuration for the discovery invocation
+     * @param clusterName The name of a particular cluster
+     *
+     * @return The discovered service data for the specified cluster
+     */
+    Cluster discover(ServiceDiscoveryConfig config, String clusterName);
+
+
+    /**
+     * A handle to the service discovery result.
+     */
+    interface Cluster {
+
+        /**
+         * @return The name of the cluster
+         */
+        String getName();
+
+        /**
+         * @param serviceName The name of the service
+         * @return The URLs for the specified service in this cluster.
+         */
+        List<String> getServiceURLs(String serviceName);
+    }
+
+
+}
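
A minimal sketch of an implementation of this contract (hypothetical class with
hard-coded data; a real implementation would query a source such as Ambari):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class StaticServiceDiscovery implements ServiceDiscovery {

        @Override
        public String getType() {
            return "STATIC"; // the identifier referenced from a simple descriptor
        }

        @Override
        public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
            // Only one cluster is known to this trivial discovery source.
            Map<String, Cluster> clusters = new HashMap<>();
            clusters.put("example", discover(config, "example"));
            return clusters;
        }

        @Override
        public Cluster discover(ServiceDiscoveryConfig config, final String clusterName) {
            return new Cluster() {
                @Override
                public String getName() {
                    return clusterName;
                }

                @Override
                public List<String> getServiceURLs(String serviceName) {
                    // Hard-coded for the sketch; normally derived from the discovered topology.
                    return Collections.singletonList("http://host.example.com:50070");
                }
            };
        }
    }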

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
new file mode 100644
index 0000000..6b2e741
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery configuration details.
+ */
+public interface ServiceDiscoveryConfig {
+
+    /**
+     *
+     * @return The address of the discovery source.
+     */
+    String getAddress();
+
+    /**
+     *
+     * @return The username configured for interactions with the discovery source.
+     */
+    String getUser();
+
+    /**
+     *
+     * @return The alias for the password required for interactions with the discovery source.
+     */
+    String getPasswordAlias();
+
+}
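
A minimal sketch of a concrete holder for these details (hypothetical class; such a
configuration would typically be populated from a simple descriptor):

    public class SimpleServiceDiscoveryConfig implements ServiceDiscoveryConfig {

        private final String address;
        private final String user;
        private final String passwordAlias;

        public SimpleServiceDiscoveryConfig(String address, String user, String passwordAlias) {
            this.address = address;
            this.user = user;
            this.passwordAlias = passwordAlias;
        }

        @Override
        public String getAddress() {
            return address;
        }

        @Override
        public String getUser() {
            return user;
        }

        @Override
        public String getPasswordAlias() {
            return passwordAlias;
        }
    }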

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
new file mode 100644
index 0000000..cddced1
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery extensions must implement this interface to add support for a new discovery source.
+ *
+ * The ServiceLoader mechanism is used to include ServiceDiscovery extensions, and implementations of this interface
+ * are the providers.
+ */
+public interface ServiceDiscoveryType {
+
+    /**
+     *
+     * @return The identifier for the discovery type.
+     */
+    String getType();
+
+
+    /**
+     *
+     * @return A new instance of the ServiceDiscovery implementation provided by this type.
+     */
+    ServiceDiscovery newInstance();
+
+}
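
Paired with the StaticServiceDiscovery sketch above (hypothetical names), the
corresponding provider could look as follows; it would be registered by listing its
fully-qualified class name in
META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType,
as the test resource earlier in this change does:

    public class StaticServiceDiscoveryType implements ServiceDiscoveryType {

        @Override
        public String getType() {
            return "STATIC"; // must match the type reported by the instances it creates
        }

        @Override
        public ServiceDiscovery newInstance() {
            return new StaticServiceDiscovery();
        }
    }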

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d2f4176..2708f6b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -45,6 +45,7 @@
         <module>gateway-i18n-logging-log4j</module>
         <module>gateway-i18n-logging-sl4j</module>
         <module>gateway-spi</module>
+        <module>gateway-discovery-ambari</module>
         <module>gateway-server</module>
         <module>gateway-server-launcher</module>
         <module>gateway-server-xforwarded-filter</module>
@@ -684,6 +685,11 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
+                <artifactId>gateway-discovery-ambari</artifactId>
+                <version>${gateway-version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-release</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
@@ -702,17 +708,16 @@
                 <artifactId>gateway-shell-samples</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
-
-        <dependency>
-            <groupId>org.picketlink</groupId>
-            <artifactId>picketlink-federation</artifactId>
-            <version>2.7.0.CR3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.jboss.logging</groupId>
-            <artifactId>jboss-logging</artifactId>
-            <version>3.2.0.Final</version>
-        </dependency>
+            <dependency>
+                <groupId>org.picketlink</groupId>
+                <artifactId>picketlink-federation</artifactId>
+                <version>2.7.0.CR3</version>
+            </dependency>
+            <dependency>
+                <groupId>org.jboss.logging</groupId>
+                <artifactId>jboss-logging</artifactId>
+                <version>3.2.0.Final</version>
+            </dependency>
             <dependency>
                 <groupId>org.glassfish.jersey.containers</groupId>
                 <artifactId>jersey-container-servlet</artifactId>


[16/22] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
index af91d14,0000000..dfe34d4
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
@@@ -1,915 -1,0 +1,925 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.config.impl;
 +
 +import org.apache.commons.lang3.StringUtils;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.hadoop.fs.Path;
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.joda.time.Period;
 +import org.joda.time.format.PeriodFormatter;
 +import org.joda.time.format.PeriodFormatterBuilder;
 +
 +import java.io.File;
 +import java.net.InetSocketAddress;
 +import java.net.MalformedURLException;
 +import java.net.URL;
 +import java.net.UnknownHostException;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collections;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.concurrent.ConcurrentHashMap;
 +
 +/**
 + * The configuration for the Gateway.
 + *
 + * The Gateway configuration variables are described in gateway-default.xml
 + *
 + * The Gateway specific configuration is split into two layers:
 + *
 + * 1. gateway-default.xml - All the configuration variables that the
 + *    Gateway needs.  These are the defaults that ship with the app
 + *    and should only be changed by the app developers.
 + *
 + * 2. gateway-site.xml - The (possibly empty) configuration that the
 + *    system administrator can set variables for their Hadoop cluster.
 + *
 + * To find the gateway configuration files the following process is used.
 + * First, if the GATEWAY_HOME system property contains a valid directory name,
 + * an attempt will be made to read the configuration files from that directory.
 + * Second, if the GATEWAY_HOME environment variable contains a valid directory name,
 + * an attempt will be made to read the configuration files from that directory.
 + * Third, an attempt will be made to load the configuration files from the directory
 + * specified via the "user.dir" system property.
 + * Fourth, an attempt will be made to load the configuration files from the classpath.
 + * Last, defaults will be used for all values.
 + *
 + * If GATEWAY_HOME isn't set via either the system property or environment variable then
 + * a value for this will be defaulted.  The default selected will be the directory that
 + * contained the last loaded configuration file that was not contained in a JAR.  If
 + * no such configuration file is loaded the value of the "user.dir" system property will be used
 + * as the value of GATEWAY_HOME.  This is important to consider for any relative file names as they
 + * will be resolved relative to the value of GATEWAY_HOME.  One such relative value is the
 + * name of the directory containing cluster topologies.  This value defaults to "clusters".
 + */
 +public class GatewayConfigImpl extends Configuration implements GatewayConfig {
 +
 +  private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM = "default.app.topology.name";
 +  private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME = null;
 +
 +  private static final GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
 +
 +  private static final String GATEWAY_CONFIG_DIR_PREFIX = "conf";
 +
 +  private static final String GATEWAY_CONFIG_FILE_PREFIX = "gateway";
 +
 +  private static final String DEFAULT_STACKS_SERVICES_DIR = "services";
 +
 +  private static final String DEFAULT_APPLICATIONS_DIR = "applications";
 +
 +  public static final String[] GATEWAY_CONFIG_FILENAMES = {
 +      GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-default.xml",
 +      GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-site.xml"
 +  };
 +
 +//  private static final String[] HADOOP_CONF_FILENAMES = {
 +//      "core-default.xml",
 +//      "core-site.xml"
 +////      "hdfs-default.xml",
 +////      "hdfs-site.xml",
 +////      "mapred-default.xml",
 +////      "mapred-site.xml"
 +//  };
 +
 +//  private static final String[] HADOOP_PREFIX_VARS = {
 +//      "HADOOP_PREFIX",
 +//      "HADOOP_HOME"
 +//  };
 +
 +  public static final String HTTP_HOST = GATEWAY_CONFIG_FILE_PREFIX + ".host";
 +  public static final String HTTP_PORT = GATEWAY_CONFIG_FILE_PREFIX + ".port";
 +  public static final String HTTP_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".path";
 +  public static final String DEPLOYMENT_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.dir";
 +  public static final String SECURITY_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".security.dir";
 +  public static final String DATA_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".data.dir";
 +  public static final String STACKS_SERVICES_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".services.dir";
 +  public static final String GLOBAL_RULES_SERVICES = GATEWAY_CONFIG_FILE_PREFIX + ".global.rules.services";
 +  public static final String APPLICATIONS_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".applications.dir";
 +  public static final String HADOOP_CONF_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".hadoop.conf.dir";
 +  public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
 +  private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
 +  private static final String CLIENT_AUTH_NEEDED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.needed";
++  private static final String CLIENT_AUTH_WANTED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.wanted";
 +  private static final String TRUSTSTORE_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.path";
 +  private static final String TRUSTSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.type";
 +  private static final String KEYSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".keystore.type";
 +  private static final String XFORWARDED_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".xforwarded.enabled";
 +  private static final String EPHEMERAL_DH_KEY_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".jdk.tls.ephemeralDHKeySize";
 +  private static final String HTTP_CLIENT_MAX_CONNECTION = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.maxConnections";
 +  private static final String HTTP_CLIENT_CONNECTION_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.connectionTimeout";
 +  private static final String HTTP_CLIENT_SOCKET_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.socketTimeout";
 +  private static final String THREAD_POOL_MAX = GATEWAY_CONFIG_FILE_PREFIX + ".threadpool.max";
 +  public static final String HTTP_SERVER_REQUEST_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestBuffer";
 +  public static final String HTTP_SERVER_REQUEST_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestHeaderBuffer";
 +  public static final String HTTP_SERVER_RESPONSE_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseBuffer";
 +  public static final String HTTP_SERVER_RESPONSE_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseHeaderBuffer";
 +  public static final String DEPLOYMENTS_BACKUP_VERSION_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.versionLimit";
 +  public static final String DEPLOYMENTS_BACKUP_AGE_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.ageLimit";
 +  public static final String METRICS_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".metrics.enabled";
 +  public static final String JMX_METRICS_REPORTING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".jmx.metrics.reporting.enabled";
 +  public static final String GRAPHITE_METRICS_REPORTING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.enabled";
 +  public static final String GRAPHITE_METRICS_REPORTING_HOST = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.host";
 +  public static final String GRAPHITE_METRICS_REPORTING_PORT = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.port";
 +  public static final String GRAPHITE_METRICS_REPORTING_FREQUENCY = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.frequency";
 +  public static final String GATEWAY_IDLE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".idle.timeout";
 +  public static final String REMOTE_IP_HEADER_NAME = GATEWAY_CONFIG_FILE_PREFIX + ".remote.ip.header.name";
 +
 +  /* @since 0.10 Websocket config variables */
 +  public static final String WEBSOCKET_FEATURE_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.feature.enabled";
 +  public static final String WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.text.size";
 +  public static final String WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.binary.size";
 +  public static final String WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.text.buffer.size";
 +  public static final String WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.binary.buffer.size";
 +  public static final String WEBSOCKET_INPUT_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.input.buffer.size";
 +  public static final String WEBSOCKET_ASYNC_WRITE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.async.write.timeout";
 +  public static final String WEBSOCKET_IDLE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.idle.timeout";
 +
 +  /**
 +   * Properties for the gateway port mapping feature
 +   */
 +  public static final String GATEWAY_PORT_MAPPING_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".port.mapping.";
 +  public static final String GATEWAY_PORT_MAPPING_REGEX = GATEWAY_CONFIG_FILE_PREFIX + "\\.port\\.mapping\\..*";
 +  public static final String GATEWAY_PORT_MAPPING_ENABLED = GATEWAY_PORT_MAPPING_PREFIX + "enabled";
 +
 +  /**
 +   * Comma-separated list of MIME types to be compressed by Knox on the way out.
 +   *
 +   * @since 0.12
 +   */
 +  public static final String MIME_TYPES_TO_COMPRESS = GATEWAY_CONFIG_FILE_PREFIX
 +      + ".gzip.compress.mime.types";
 +
 +  // These config property names are not in line with the convention of using the
 +  // GATEWAY_CONFIG_FILE_PREFIX as is done by those above. They are left for
 +  // backward compatibility.
 +  // LET'S NOT CONTINUE THIS PATTERN BUT LEAVE THEM FOR NOW.
 +  private static final String SSL_ENABLED = "ssl.enabled";
 +  private static final String SSL_EXCLUDE_PROTOCOLS = "ssl.exclude.protocols";
 +  private static final String SSL_INCLUDE_CIPHERS = "ssl.include.ciphers";
 +  private static final String SSL_EXCLUDE_CIPHERS = "ssl.exclude.ciphers";
 +  // END BACKWARD COMPATIBLE BLOCK
 +  
 +  public static final String DEFAULT_HTTP_PORT = "8888";
 +  public static final String DEFAULT_HTTP_PATH = "gateway";
 +  public static final String DEFAULT_DEPLOYMENT_DIR = "deployments";
 +  public static final String DEFAULT_SECURITY_DIR = "security";
 +  public static final String DEFAULT_DATA_DIR = "data";
 +
 +  /* Websocket defaults */
 +  public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED = false;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = Integer.MAX_VALUE;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = Integer.MAX_VALUE;
 +  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = 32768;
 +  public static final int DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE = 4096;
 +  public static final int DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT = 60000;
 +  public static final int DEFAULT_WEBSOCKET_IDLE_TIMEOUT = 300000;
 +
 +  public static final boolean DEFAULT_GATEWAY_PORT_MAPPING_ENABLED = true;
 +
 +  /**
 +   * Default list of MIME types to be compressed.
 +   * @since 0.12
 +   */
 +  public static final String DEFAULT_MIME_TYPES_TO_COMPRESS = "text/html, text/plain, text/xml, text/css, "
 +      + "application/javascript, application/x-javascript, text/javascript";
 +
 +  public static final String COOKIE_SCOPING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".scope.cookies.feature.enabled";
 +  public static final boolean DEFAULT_COOKIE_SCOPING_FEATURE_ENABLED = false;
 +  private static final String CRYPTO_ALGORITHM = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.algorithm";
 +  private static final String CRYPTO_PBE_ALGORITHM = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.pbe.algorithm";
 +  private static final String CRYPTO_TRANSFORMATION = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.transformation";
 +  private static final String CRYPTO_SALTSIZE = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.salt.size";
 +  private static final String CRYPTO_ITERATION_COUNT = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.iteration.count";
 +  private static final String CRYPTO_KEY_LENGTH = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.key.length";
 +  public static final String SERVER_HEADER_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".server.header.enabled";
 +
 +  private static List<String> DEFAULT_GLOBAL_RULES_SERVICES;
 +
 +
 +  public GatewayConfigImpl() {
 +    init();
 +  }
 +
 +  private String getVar( String variableName, String defaultValue ) {
 +    String value = get( variableName );
 +    if( value == null ) {
 +      value = System.getProperty( variableName );
 +    }
 +    if( value == null ) {
 +      value = System.getenv( variableName );
 +    }
 +    if( value == null ) {
 +      value = defaultValue;
 +    }
 +    return value;
 +  }
 +
 +  private String getGatewayHomeDir() {
 +    String home = get(
 +        GATEWAY_HOME_VAR,
 +        System.getProperty(
 +            GATEWAY_HOME_VAR,
 +            System.getenv( GATEWAY_HOME_VAR ) ) );
 +    return home;
 +  }
 +
 +  private void setGatewayHomeDir( String dir ) {
 +    set( GATEWAY_HOME_VAR, dir );
 +  }
 +
 +  @Override
 +  public String getGatewayConfDir() {
 +    String value = getVar( GATEWAY_CONF_HOME_VAR, getGatewayHomeDir() + File.separator + "conf"  );
 +    return value;
 +  }
 +
 +  @Override
 +  public String getGatewayDataDir() {
 +    String systemValue =
 +        System.getProperty(GATEWAY_DATA_HOME_VAR, System.getenv(GATEWAY_DATA_HOME_VAR));
 +    String dataDir = null;
 +    if (systemValue != null) {
 +      dataDir = systemValue;
 +    } else {
 +      dataDir = get(DATA_DIR, getGatewayHomeDir() + File.separator + DEFAULT_DATA_DIR);
 +    }
 +    return dataDir;
 +  }
 +
 +  @Override
 +  public String getGatewayServicesDir() {
 +    return get(STACKS_SERVICES_DIR, getGatewayDataDir() + File.separator + DEFAULT_STACKS_SERVICES_DIR);
 +  }
 +
 +  @Override
 +  public String getGatewayApplicationsDir() {
 +    return get(APPLICATIONS_DIR, getGatewayDataDir() + File.separator + DEFAULT_APPLICATIONS_DIR);
 +  }
 +
 +  @Override
 +  public String getHadoopConfDir() {
 +    return get( HADOOP_CONF_DIR );
 +  }
 +
 +  private void init() {
 +    // Load environment variables.
 +    for( Map.Entry<String, String> e : System.getenv().entrySet() ) {
 +      set( "env." + e.getKey(), e.getValue() );
 +    }
 +    // Load system properties.
 +    for( Map.Entry<Object, Object> p : System.getProperties().entrySet() ) {
 +      set( "sys." + p.getKey().toString(), p.getValue().toString() );
 +    }
 +
 +    URL lastFileUrl = null;
 +    for( String fileName : GATEWAY_CONFIG_FILENAMES ) {
 +      lastFileUrl = loadConfig( fileName, lastFileUrl );
 +    }
 +    //set default services list
 +    setDefaultGlobalRulesServices();
 +
 +    initGatewayHomeDir( lastFileUrl );
 +
 +    // log whether the scoping cookies to the gateway.path feature is enabled
 +    log.cookieScopingFeatureEnabled(isCookieScopingToPathEnabled());
 +  }
 +
 +  private void setDefaultGlobalRulesServices() {
 +    DEFAULT_GLOBAL_RULES_SERVICES = new ArrayList<>();
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("NAMENODE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("JOBTRACKER");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHDFS");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHCAT");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("OOZIE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHBASE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("HIVE");
 +    DEFAULT_GLOBAL_RULES_SERVICES.add("RESOURCEMANAGER");
 +  }
 +
 +  private void initGatewayHomeDir( URL lastFileUrl ) {
 +    String home = System.getProperty( GATEWAY_HOME_VAR );
 +    if( home != null ) {
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "system property", home );
 +      return;
 +    }
 +    home = System.getenv( GATEWAY_HOME_VAR );
 +    if( home != null ) {
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "environment variable", home );
 +      return;
 +    }
 +    if( lastFileUrl != null ) {
 +      File file = new File( lastFileUrl.getFile() ).getAbsoluteFile();
 +      File dir = file.getParentFile().getParentFile(); // Move up two levels to get to parent of conf.
 +      if( dir.exists() && dir.canRead() )
 +        home = dir.getAbsolutePath();
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "configuration file location", home );
 +      return;
 +    }
 +    home = System.getProperty( "user.dir" );
 +    if( home != null ) {
 +      set( GATEWAY_HOME_VAR, home );
 +      log.settingGatewayHomeDir( "user.dir system property", home );
 +      return;
 +    }
 +  }
 +
 +  // 1. GATEWAY_HOME system property
 +  // 2. GATEWAY_HOME environment variable
 +  // 3. user.dir system property
 +  // 4. class path
 +  private URL loadConfig( String fileName, URL lastFileUrl ) {
 +    lastFileUrl = loadConfigFile( System.getProperty( GATEWAY_HOME_VAR ), fileName );
 +    if( lastFileUrl == null ) {
 +      lastFileUrl = loadConfigFile( System.getenv( GATEWAY_HOME_VAR ), fileName );
 +    }
 +    if( lastFileUrl == null ) {
 +      lastFileUrl = loadConfigFile( System.getProperty( "user.dir" ), fileName );
 +    }
 +    if( lastFileUrl == null ) {
 +      lastFileUrl = loadConfigResource( fileName );
 +    }
 +    if( lastFileUrl != null && !"file".equals( lastFileUrl.getProtocol() ) ) {
 +      lastFileUrl = null;
 +    }
 +    return lastFileUrl;
 +  }
 +
 +  private URL loadConfigFile( String dir, String file ) {
 +    URL url = null;
 +    if( dir != null ) {
 +      File f = new File( dir, file );
 +      if( f.exists() ) {
 +        String path = f.getAbsolutePath();
 +        try {
 +          url = f.toURI().toURL();
 +          addResource( new Path( path ) );
 +          log.loadingConfigurationFile( path );
 +        } catch ( MalformedURLException e ) {
 +          log.failedToLoadConfig( path, e );
 +        }
 +      }
 +    }
 +    return url;
 +  }
 +
 +  private URL loadConfigResource( String file ) {
 +    URL url = getResource( file );
 +    if( url != null ) {
 +      log.loadingConfigurationResource( url.toExternalForm() );
 +      addResource( url );
 +    }
 +    return url;
 +  }
 +
 +  @Override
 +  public String getGatewayHost() {
 +    String host = get( HTTP_HOST, "0.0.0.0" );
 +    return host;
 +  }
 +
 +  @Override
 +  public int getGatewayPort() {
 +    return Integer.parseInt( get( HTTP_PORT, DEFAULT_HTTP_PORT ) );
 +  }
 +
 +  @Override
 +  public String getGatewayPath() {
 +    return get( HTTP_PATH, DEFAULT_HTTP_PATH );
 +  }
 +
 +  @Override
 +  public String getGatewayTopologyDir() {
 +    return getGatewayConfDir() + File.separator + "topologies";
 +  }
 +
 +  @Override
 +  public String getGatewayDeploymentDir() {
 +    return get(DEPLOYMENT_DIR, getGatewayDataDir() + File.separator + DEFAULT_DEPLOYMENT_DIR);
 +  }
 +
 +  @Override
 +  public String getGatewaySecurityDir() {
 +    return get(SECURITY_DIR, getGatewayDataDir() + File.separator + DEFAULT_SECURITY_DIR);
 +  }
 +
 +  @Override
 +  public InetSocketAddress getGatewayAddress() throws UnknownHostException {
 +    String host = getGatewayHost();
 +    int port = getGatewayPort();
 +    InetSocketAddress address = new InetSocketAddress( host, port );
 +    return address;
 +  }
 +
 +  @Override
 +  public boolean isSSLEnabled() {
 +    String enabled = get( SSL_ENABLED, "true" );
 +    
 +    return "true".equals(enabled);
 +  }
 +
 +  @Override
 +  public boolean isHadoopKerberosSecured() {
 +    String hadoopKerberosSecured = get( HADOOP_KERBEROS_SECURED, "false" );
 +    return "true".equals(hadoopKerberosSecured);
 +  }
 +
 +  @Override
 +  public String getKerberosConfig() {
 +    return get( KRB5_CONFIG ) ;
 +  }
 +
 +  @Override
 +  public boolean isKerberosDebugEnabled() {
 +    String kerberosDebugEnabled = get( KRB5_DEBUG, "false" );
 +    return "true".equals(kerberosDebugEnabled);
 +  }
 +  
 +  @Override
 +  public String getKerberosLoginConfig() {
 +    return get( KRB5_LOGIN_CONFIG );
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getDefaultTopologyName()
 +   */
 +  @Override
 +  public String getDefaultTopologyName() {
 +    String name = get(GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM);
 +    return name != null ? name : GATEWAY_DEFAULT_TOPOLOGY_NAME;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getDefaultAppRedirectPath()
 +   */
 +  @Override
 +  public String getDefaultAppRedirectPath() {
 +    String defTopo = getDefaultTopologyName();
 +    if( defTopo == null ) {
 +      return null;
 +    } else {
 +      return "/" + getGatewayPath() + "/" + defTopo;
 +    }
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getFrontendUrl()
 +   */
 +  @Override
 +  public String getFrontendUrl() {
 +    String url = get( FRONTEND_URL, null );
 +    return url;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getExcludedSSLProtocols()
 +   */
 +  @Override
 +  public List<String> getExcludedSSLProtocols() {
 +    List<String> protocols = null;
 +    String value = get(SSL_EXCLUDE_PROTOCOLS);
 +    if (!"none".equals(value)) {
 +      protocols = Arrays.asList(value.split("\\s*,\\s*"));
 +    }
 +    return protocols;
 +  }
 +
 +  @Override
 +  public List<String> getIncludedSSLCiphers() {
 +    List<String> list = null;
 +    String value = get(SSL_INCLUDE_CIPHERS);
 +    if (value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim())) {
 +      list = Arrays.asList(value.trim().split("\\s*,\\s*"));
 +    }
 +    return list;
 +  }
 +
 +  @Override
 +  public List<String> getExcludedSSLCiphers() {
 +    List<String> list = null;
 +    String value = get(SSL_EXCLUDE_CIPHERS);
 +    if (value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim())) {
 +      list = Arrays.asList(value.trim().split("\\s*,\\s*"));
 +    }
 +    return list;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isClientAuthNeeded()
 +   */
 +  @Override
 +  public boolean isClientAuthNeeded() {
 +    String clientAuthNeeded = get( CLIENT_AUTH_NEEDED, "false" );
 +    return "true".equals(clientAuthNeeded);
 +  }
 +
 +  /* (non-Javadoc)
++   * @see org.apache.knox.gateway.config.GatewayConfig#isClientAuthWanted()
++   */
++  @Override
++  public boolean isClientAuthWanted() {
++    String clientAuthWanted = get( CLIENT_AUTH_WANTED, "false" );
++    return "true".equals(clientAuthWanted);
++  }
++
++  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getTruststorePath() {
 +    return get( TRUSTSTORE_PATH, null);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTrustAllCerts()
 +   */
 +  @Override
 +  public boolean getTrustAllCerts() {
 +    String trustAllCerts = get( TRUST_ALL_CERTS, "false" );
 +    return "true".equals(trustAllCerts);
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getTruststoreType() {
 +    return get( TRUSTSTORE_TYPE, "JKS");
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getTruststorePath()
 +   */
 +  @Override
 +  public String getKeystoreType() {
 +    return get( KEYSTORE_TYPE, "JKS");
 +  }
 +
 +  @Override
 +  public boolean isXForwardedEnabled() {
 +    String xForwardedEnabled = get( XFORWARDED_ENABLED, "true" );
 +    return "true".equals(xForwardedEnabled);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getEphemeralDHKeySize()
 +   */
 +  @Override
 +  public String getEphemeralDHKeySize() {
 +    return get( EPHEMERAL_DH_KEY_SIZE, "2048");
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getHttpClientMaxConnections()
 +   */
 +  @Override
 +  public int getHttpClientMaxConnections() {
 +    return getInt( HTTP_CLIENT_MAX_CONNECTION, 32 );
 +  }
 +
 +  @Override
 +  public int getHttpClientConnectionTimeout() {
 +    int t = -1;
 +    String s = get( HTTP_CLIENT_CONNECTION_TIMEOUT, null );
 +    if ( s != null ) {
 +      try {
 +        t = (int)parseNetworkTimeout( s );
 +      } catch ( Exception e ) {
 +        // Ignore it and use the default.
 +      }
 +    }
 +    return t;
 +  }
 +
 +  @Override
 +  public int getHttpClientSocketTimeout() {
 +    int t = -1;
 +    String s = get( HTTP_CLIENT_SOCKET_TIMEOUT, null );
 +    if ( s != null ) {
 +      try {
 +        t = (int)parseNetworkTimeout( s );
 +      } catch ( Exception e ) {
 +        // Ignore it and use the default.
 +      }
 +    }
 +    return t;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#getThreadPoolMax()
 +   */
 +  @Override
 +  public int getThreadPoolMax() {
 +    int i = getInt( THREAD_POOL_MAX, 254 );
 +    // Testing has shown that a value lower than 5 prevents Jetty from servicing requests.
 +    if( i < 5 ) {
 +      i = 5;
 +    }
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestBuffer() {
 +    int i = getInt( HTTP_SERVER_REQUEST_BUFFER, 16 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerRequestHeaderBuffer() {
 +    int i = getInt( HTTP_SERVER_REQUEST_HEADER_BUFFER, 8 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseBuffer() {
 +    int i = getInt( HTTP_SERVER_RESPONSE_BUFFER, 32 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getHttpServerResponseHeaderBuffer() {
 +    int i = getInt( HTTP_SERVER_RESPONSE_HEADER_BUFFER, 8 * 1024 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getGatewayDeploymentsBackupVersionLimit() {
 +    int i = getInt( DEPLOYMENTS_BACKUP_VERSION_LIMIT, 5 );
 +    if( i < 0 ) {
 +      i = -1;
 +    }
 +    return i;
 +  }
 +
 +  @Override
 +  public long getGatewayIdleTimeout() {
 +    return getLong(GATEWAY_IDLE_TIMEOUT, 300000L);
 +  }
 +
 +  @Override
 +  public long getGatewayDeploymentsBackupAgeLimit() {
 +    PeriodFormatter f = new PeriodFormatterBuilder().appendDays().toFormatter();
 +    String s = get( DEPLOYMENTS_BACKUP_AGE_LIMIT, "-1" );
 +    long d;
 +    try {
 +      Period p = Period.parse( s, f );
 +      d = p.toStandardDuration().getMillis();
 +      if( d < 0 ) {
 +        d = -1;
 +      }
 +    } catch( Exception e ) {
 +      d = -1;
 +    }
 +    return d;
 +  }
 +
 +  @Override
 +  public String getSigningKeystoreName() {
 +    return get(SIGNING_KEYSTORE_NAME);
 +  }
 +
 +  @Override
 +  public String getSigningKeyAlias() {
 +    return get(SIGNING_KEY_ALIAS);
 +  }
 +
 +  @Override
 +  public List<String> getGlobalRulesServices() {
 +    String value = get( GLOBAL_RULES_SERVICES );
 +    if ( value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim()) ) {
 +      return Arrays.asList( value.trim().split("\\s*,\\s*") );
 +    }
 +    return DEFAULT_GLOBAL_RULES_SERVICES;
 +  }
 +
 +  @Override
 +  public boolean isMetricsEnabled() {
 +    String metricsEnabled = get( METRICS_ENABLED, "false" );
 +    return "true".equals(metricsEnabled);
 +  }
 +
 +  @Override
 +  public boolean isJmxMetricsReportingEnabled() {
 +    String enabled = get( JMX_METRICS_REPORTING_ENABLED, "false" );
 +    return "true".equals(enabled);
 +  }
 +
 +  @Override
 +  public boolean isGraphiteMetricsReportingEnabled() {
 +    String enabled = get( GRAPHITE_METRICS_REPORTING_ENABLED, "false" );
 +    return "true".equals(enabled);
 +  }
 +
 +  @Override
 +  public String getGraphiteHost() {
 +    String host = get( GRAPHITE_METRICS_REPORTING_HOST, "localhost" );
 +    return host;
 +  }
 +
 +  @Override
 +  public int getGraphitePort() {
 +    int i = getInt( GRAPHITE_METRICS_REPORTING_PORT, 32772 );
 +    return i;
 +  }
 +
 +  @Override
 +  public int getGraphiteReportingFrequency() {
 +    int i = getInt( GRAPHITE_METRICS_REPORTING_FREQUENCY, 1 );
 +    return i;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#isWebsocketEnabled()
 +   */
 +  @Override
 +  public boolean isWebsocketEnabled() {
 +    final String result = get( WEBSOCKET_FEATURE_ENABLED, Boolean.toString(DEFAULT_WEBSOCKET_FEATURE_ENABLED));
 +    return Boolean.parseBoolean(result);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxTextMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageSize() {
 +    return getInt( WEBSOCKET_MAX_TEXT_MESSAGE_SIZE, DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxBinaryMessageSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageSize() {
 +    return getInt( WEBSOCKET_MAX_BINARY_MESSAGE_SIZE, DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxTextMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxTextMessageBufferSize() {
 +    return getInt( WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE, DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketMaxBinaryMessageBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketMaxBinaryMessageBufferSize() {
 +    return getInt( WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE, DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketInputBufferSize()
 +   */
 +  @Override
 +  public int getWebsocketInputBufferSize() {
 +    return getInt( WEBSOCKET_INPUT_BUFFER_SIZE, DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketAsyncWriteTimeout()
 +   */
 +  @Override
 +  public int getWebsocketAsyncWriteTimeout() {
 +    return getInt( WEBSOCKET_ASYNC_WRITE_TIMEOUT, DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see GatewayConfig#websocketIdleTimeout()
 +   */
 +  @Override
 +  public int getWebsocketIdleTimeout() {
 +    return getInt( WEBSOCKET_IDLE_TIMEOUT, DEFAULT_WEBSOCKET_IDLE_TIMEOUT);
 +  }
 +
 +  /*
 +   * (non-Javadoc)
 +   *
 +   * @see
 +   * GatewayConfig#getMimeTypesToCompress()
 +   */
 +  @Override
 +  public List<String> getMimeTypesToCompress() {
 +    List<String> mimeTypes = null;
 +    String value = get(MIME_TYPES_TO_COMPRESS, DEFAULT_MIME_TYPES_TO_COMPRESS);
 +    if (value != null && !value.isEmpty()) {
 +      mimeTypes = Arrays.asList(value.trim().split("\\s*,\\s*"));
 +    }
 +    return mimeTypes;
 +  }
 +
 +  /**
 +   * Map of topology names and their ports.
 +   *
 +   * @return An unmodifiable map of topology names to their mapped ports.
 +   */
 +  @Override
 +  public Map<String, Integer> getGatewayPortMappings() {
 +
 +    final Map<String, Integer> result = new ConcurrentHashMap<String, Integer>();
 +    final Map<String, String> properties = getValByRegex(GATEWAY_PORT_MAPPING_REGEX);
 +
 +    // Convert port no. from string to int
 +    for(final Map.Entry<String, String> e : properties.entrySet()) {
 +      // ignore the GATEWAY_PORT_MAPPING_ENABLED property
 +      if(!e.getKey().equalsIgnoreCase(GATEWAY_PORT_MAPPING_ENABLED)) {
 +        // extract the topology name and use it as a key
 +        result.put(StringUtils.substringAfter(e.getKey(), GATEWAY_PORT_MAPPING_PREFIX), Integer.parseInt(e.getValue()) );
 +      }
 +
 +    }
 +
 +    return Collections.unmodifiableMap(result);
 +  }
 +
 +  /**
 +   * Is the port mapping feature on?
 +   *
 +   * @return true if the port mapping feature is enabled; false otherwise.
 +   */
 +  @Override
 +  public boolean isGatewayPortMappingEnabled() {
 +    final String result = get( GATEWAY_PORT_MAPPING_ENABLED, Boolean.toString(DEFAULT_GATEWAY_PORT_MAPPING_ENABLED));
 +    return Boolean.parseBoolean(result);
 +  }
 +
 +  private static long parseNetworkTimeout(String s ) {
 +    PeriodFormatter f = new PeriodFormatterBuilder()
 +        .appendMinutes().appendSuffix("m"," min")
 +        .appendSeconds().appendSuffix("s"," sec")
 +        .appendMillis().toFormatter();
 +    Period p = Period.parse( s, f );
 +    return p.toStandardDuration().getMillis();
 +  }
 +
 +  @Override
 +  public boolean isCookieScopingToPathEnabled() {
 +    final boolean result = Boolean.parseBoolean(get(COOKIE_SCOPING_ENABLED,
 +            Boolean.toString(DEFAULT_COOKIE_SCOPING_FEATURE_ENABLED)));
 +    return result;
 +  }
 +
 +  @Override
 +  public String getHeaderNameForRemoteAddress() {
 +    String value = getVar(REMOTE_IP_HEADER_NAME, "X-Forwarded-For");
 +    return value;
 +  }
 +
 +  @Override
 +  public String getAlgorithm() {
 +    return getVar(CRYPTO_ALGORITHM, null);
 +  }
 +
 +  @Override
 +  public String getPBEAlgorithm() {
 +    return getVar(CRYPTO_PBE_ALGORITHM, null);
 +  }
 +
 +  @Override
 +  public String getTransformation() {
 +    return getVar(CRYPTO_TRANSFORMATION, null);
 +  }
 +
 +  @Override
 +  public String getSaltSize() {
 +    return getVar(CRYPTO_SALTSIZE, null);
 +  }
 +
 +  @Override
 +  public String getIterationCount() {
 +    return getVar(CRYPTO_ITERATION_COUNT, null);
 +  }
 +
 +  @Override
 +  public String getKeyLength() {
 +    return getVar(CRYPTO_KEY_LENGTH, null);
 +  }
 +
 +  @Override
 +  public boolean isGatewayServerHeaderEnabled() {
 +    return Boolean.parseBoolean(getVar(SERVER_HEADER_ENABLED, "true"));
 +  }
 +}
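
The getGatewayPortMappings() method above derives per-topology ports from gateway-site
properties. Below is a minimal standalone sketch of that extraction against a plain map;
the "gateway.port.mapping." prefix and "gateway.port.mapping.enabled" toggle are assumed
values standing in for GATEWAY_PORT_MAPPING_PREFIX and GATEWAY_PORT_MAPPING_ENABLED,
which are not shown in this diff.

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class PortMappingSketch {
      // Assumed values; the real constants live in GatewayConfigImpl.
      static final String PREFIX  = "gateway.port.mapping.";
      static final String ENABLED = "gateway.port.mapping.enabled";

      static Map<String, Integer> parse(Map<String, String> props) {
        final Map<String, Integer> result = new HashMap<>();
        for (Map.Entry<String, String> e : props.entrySet()) {
          // Skip the feature toggle itself; every other match maps a topology name to a port.
          if (e.getKey().startsWith(PREFIX) && !e.getKey().equalsIgnoreCase(ENABLED)) {
            result.put(e.getKey().substring(PREFIX.length()), Integer.parseInt(e.getValue()));
          }
        }
        return Collections.unmodifiableMap(result);
      }

      public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("gateway.port.mapping.sandbox", "8444");
        props.put("gateway.port.mapping.enabled", "true");
        System.out.println(parse(props)); // prints {sandbox=8444}
      }
    }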

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
index 09c0505,0000000..c2acd54
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/DefaultGatewayServices.java
@@@ -1,224 -1,0 +1,223 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services;
 +
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.deploy.DeploymentContext;
 +import org.apache.knox.gateway.descriptor.FilterParamDescriptor;
 +import org.apache.knox.gateway.descriptor.ResourceDescriptor;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.services.registry.impl.DefaultServiceDefinitionRegistry;
 +import org.apache.knox.gateway.services.metrics.impl.DefaultMetricsService;
 +import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
 +import org.apache.knox.gateway.services.hostmap.impl.DefaultHostMapperService;
 +import org.apache.knox.gateway.services.registry.impl.DefaultServiceRegistryService;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +import org.apache.knox.gateway.services.security.SSLService;
 +import org.apache.knox.gateway.services.security.impl.DefaultAliasService;
 +import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
 +import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
 +import org.apache.knox.gateway.services.security.impl.DefaultMasterService;
 +import org.apache.knox.gateway.services.security.impl.JettySSLService;
 +import org.apache.knox.gateway.services.token.impl.DefaultTokenAuthorityService;
 +import org.apache.knox.gateway.topology.Provider;
 +
 +import java.util.Collection;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +public class DefaultGatewayServices implements GatewayServices {
 +
 +  private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
 +
 +  private Map<String,Service> services = new HashMap<>();
 +  private DefaultMasterService ms = null;
 +  private DefaultKeystoreService ks = null;
 +
 +  public DefaultGatewayServices() {
 +    super();
 +  }
 +
 +  public void init(GatewayConfig config, Map<String,String> options) throws ServiceLifecycleException {
 +    ms = new DefaultMasterService();
 +    ms.init(config, options);
 +    services.put("MasterService", ms);
 +
 +    ks = new DefaultKeystoreService();
 +    ks.setMasterService(ms);
 +    ks.init(config, options);
 +    services.put(KEYSTORE_SERVICE, ks);
 +    
 +    DefaultAliasService alias = new DefaultAliasService();
 +    alias.setKeystoreService(ks);
 +    alias.setMasterService(ms);
 +    alias.init(config, options);
 +    services.put(ALIAS_SERVICE, alias);
 +
 +    DefaultCryptoService crypto = new DefaultCryptoService();
 +    crypto.setKeystoreService(ks);
 +    crypto.setAliasService(alias);
 +    crypto.init(config, options);
 +    services.put(CRYPTO_SERVICE, crypto);
 +    
 +    DefaultTokenAuthorityService ts = new DefaultTokenAuthorityService();
 +    ts.setAliasService(alias);
 +    ts.setKeystoreService(ks);
 +    ts.init(config, options);
 +    // probably should not allow the token service to be looked up?
 +    services.put(TOKEN_SERVICE, ts);
 +    
 +    JettySSLService ssl = new JettySSLService();
 +    ssl.setAliasService(alias);
 +    ssl.setKeystoreService(ks);
 +    ssl.setMasterService(ms);
 +    ssl.init(config, options);
 +    services.put(SSL_SERVICE, ssl);
 +
 +    DefaultServiceRegistryService sr = new DefaultServiceRegistryService();
 +    sr.setCryptoService( crypto );
 +    sr.init( config, options );
 +    services.put( SERVICE_REGISTRY_SERVICE, sr );
 +
 +    DefaultHostMapperService hm = new DefaultHostMapperService();
 +    hm.init( config, options );
 +    services.put( HOST_MAPPING_SERVICE, hm );
 +
 +    DefaultServerInfoService sis = new DefaultServerInfoService();
 +    sis.init( config, options );
 +    services.put( SERVER_INFO_SERVICE, sis );
 +
 +    DefaultTopologyService tops = new DefaultTopologyService();
++    tops.setAliasService(alias);
 +    tops.init(  config, options  );
 +    services.put(  TOPOLOGY_SERVICE, tops  );
 +
 +    DefaultServiceDefinitionRegistry sdr = new DefaultServiceDefinitionRegistry();
 +    sdr.init( config, options );
 +    services.put( SERVICE_DEFINITION_REGISTRY, sdr );
-     tops.init( config, options );
-     services.put( TOPOLOGY_SERVICE, tops );
 +
 +    DefaultMetricsService metricsService = new DefaultMetricsService();
 +    metricsService.init( config, options );
 +    services.put( METRICS_SERVICE, metricsService );
 +  }
 +  
 +  public void start() throws ServiceLifecycleException {
 +    ms.start();
 +
 +    ks.start();
 +
 +    DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
 +    alias.start();
 +
 +    SSLService ssl = (SSLService) services.get(SSL_SERVICE);
 +    ssl.start();
 +
 +    ServerInfoService sis = (ServerInfoService) services.get(SERVER_INFO_SERVICE);
 +    sis.start();
 +
 +    DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
 +    tops.start();
 +
 +    DefaultMetricsService metricsService = (DefaultMetricsService) services.get(METRICS_SERVICE);
 +    metricsService.start();
 +  }
 +
 +  public void stop() throws ServiceLifecycleException {
 +    ms.stop();
 +
 +    ks.stop();
 +
 +    DefaultAliasService alias = (DefaultAliasService) services.get(ALIAS_SERVICE);
 +    alias.stop();
 +
 +    SSLService ssl = (SSLService) services.get(SSL_SERVICE);
 +    ssl.stop();
 +
 +    ServerInfoService sis = (ServerInfoService) services.get(SERVER_INFO_SERVICE);
 +    sis.stop();
 +
 +    DefaultTopologyService tops = (DefaultTopologyService)services.get(TOPOLOGY_SERVICE);
 +    tops.stop();
 +
 +    DefaultMetricsService metricsService = (DefaultMetricsService) services.get(METRICS_SERVICE);
 +    metricsService.stop();
 +
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.GatewayServices#getServiceNames()
 +   */
 +  @Override
 +  public Collection<String> getServiceNames() {
 +    return services.keySet();
 +  }
 +  
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.GatewayServices#getService(java.lang.String)
 +   */
 +  @Override
 +  public <T> T getService(String serviceName) {
 +    return (T)services.get(serviceName);
 +  }
 +
 +  @Override
 +  public String getRole() {
 +    return "Services";
 +  }
 +
 +  @Override
 +  public String getName() {
 +    return "GatewayServices";
 +  }
 +
 +  @Override
 +  public void initializeContribution(DeploymentContext context) {
 +    // setup credential store as appropriate
 +    String clusterName = context.getTopology().getName();
 +    try {
 +      if (!ks.isCredentialStoreForClusterAvailable(clusterName)) {
 +        log.creatingCredentialStoreForCluster(clusterName);
 +        ks.createCredentialStoreForCluster(clusterName);
 +      }
 +      else {
 +        log.credentialStoreForClusterFoundNotCreating(clusterName);
 +      }
 +    } catch (KeystoreServiceException e) {
 +      throw new RuntimeException("Credential store was found but was unable to be loaded - the provided (or persisted) master secret may not match the password for the credential store.", e);
 +    }
 +  }
 +
 +  @Override
 +  public void contributeProvider(DeploymentContext context, Provider provider) {
 +  }
 +
 +  @Override
 +  public void contributeFilter(DeploymentContext context, Provider provider,
 +      org.apache.knox.gateway.topology.Service service,
 +      ResourceDescriptor resource, List<FilterParamDescriptor> params) {
 +  }
 +
 +  @Override
 +  public void finalizeContribution(DeploymentContext context) {
 +    // Tell the provider the location of the descriptor.
 +    context.getWebAppDescriptor().createListener().listenerClass( GatewayServicesContextListener.class.getName() );
 +    context.getWebAppDescriptor().createListener().listenerClass(GatewayMetricsServletContextListener.class.getName());
 +  }
 +}
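
DefaultGatewayServices wires every service by hand and exposes each one by name through
getService(String). A short sketch of driving that lifecycle, assuming the ALIAS_SERVICE
constant is defined on the GatewayServices interface and that the caller already holds a
GatewayConfig instance:

    package org.apache.knox.gateway.services;

    import java.util.HashMap;

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.apache.knox.gateway.services.security.AliasService;

    // Same package as DefaultGatewayServices, so the classes above resolve without imports.
    public class ServicesLifecycleSketch {
      static void run(GatewayConfig config) throws ServiceLifecycleException {
        DefaultGatewayServices services = new DefaultGatewayServices();
        services.init(config, new HashMap<String, String>()); // wires master, keystore, alias, ssl, ...
        services.start();
        AliasService alias = services.getService(GatewayServices.ALIAS_SERVICE);
        System.out.println("alias service available: " + (alias != null));
        services.stop();
      }
    }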

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/JettySSLService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/JettySSLService.java
index 85e46df,0000000..378e93c
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/JettySSLService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/JettySSLService.java
@@@ -1,255 -1,0 +1,262 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.security.impl;
 +
 +import java.io.FileInputStream;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.security.KeyStore;
 +import java.security.KeyStoreException;
 +import java.security.NoSuchAlgorithmException;
 +import java.security.cert.Certificate;
 +import java.security.cert.CertificateException;
 +import java.security.cert.CertificateExpiredException;
 +import java.security.cert.CertificateNotYetValidException;
 +import java.security.cert.X509Certificate;
 +import java.util.Date;
 +import java.util.List;
 +import java.util.Map;
 +import javax.security.auth.x500.X500Principal;
 +
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.AliasServiceException;
 +import org.apache.knox.gateway.services.security.KeystoreService;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +import org.apache.knox.gateway.services.security.MasterService;
 +import org.apache.knox.gateway.services.security.SSLService;
 +import org.apache.knox.gateway.util.X500PrincipalParser;
 +import org.eclipse.jetty.util.ssl.SslContextFactory;
 +
 +public class JettySSLService implements SSLService {
 +  private static final String EPHEMERAL_DH_KEY_SIZE_PROPERTY = "jdk.tls.ephemeralDHKeySize";
 +  private static final String GATEWAY_TRUSTSTORE_PASSWORD = "gateway-truststore-password";
 +  private static final String GATEWAY_CREDENTIAL_STORE_NAME = "__gateway";
 +  private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
 +
 +  private MasterService ms;
 +  private KeystoreService ks;
 +  private AliasService as;
 +  private List<String> sslIncludeCiphers = null;
 +  private List<String> sslExcludeCiphers = null;
 +  private List<String> sslExcludeProtocols = null;
 +  private boolean clientAuthNeeded;
 +  private boolean trustAllCerts;
 +  private String truststorePath;
 +  private String keystoreType;
 +  private String trustStoreType;
++  private boolean clientAuthWanted;
 +
 +  public void setMasterService(MasterService ms) {
 +    this.ms = ms;
 +  }
 +
 +  public void setAliasService(AliasService as) {
 +    this.as = as;
 +  }
 +
 +  public void setKeystoreService(KeystoreService ks) {
 +    this.ks = ks;
 +  }
 +
 +  
 +  @Override
 +  public void init(GatewayConfig config, Map<String, String> options)
 +      throws ServiceLifecycleException {
 +    // set any JSSE or security related system properties
 +    System.setProperty(EPHEMERAL_DH_KEY_SIZE_PROPERTY, config.getEphemeralDHKeySize());
 +    try {
 +      if (!ks.isCredentialStoreForClusterAvailable(GATEWAY_CREDENTIAL_STORE_NAME)) {
 +        log.creatingCredentialStoreForGateway();
 +        ks.createCredentialStoreForCluster(GATEWAY_CREDENTIAL_STORE_NAME);
 +        // LET'S NOT GENERATE A DIFFERENT KEY PASSPHRASE BY DEFAULT ANYMORE
 +        // IF A DEPLOYMENT WANTS TO CHANGE THE KEY PASSPHRASE TO MAKE IT MORE SECURE THEN
 +        // THEY CAN ADD THE ALIAS EXPLICITLY WITH THE CLI
 +        // as.generateAliasForCluster(GATEWAY_CREDENTIAL_STORE_NAME, GATEWAY_IDENTITY_PASSPHRASE);
 +      }
 +      else {
 +        log.credentialStoreForGatewayFoundNotCreating();
 +      }
 +    } catch (KeystoreServiceException e) {
 +      throw new ServiceLifecycleException("Keystore was not loaded properly - the provided (or persisted) master secret may not match the password for the keystore.", e);
 +    }
 +
 +    try {
 +      if (!ks.isKeystoreForGatewayAvailable()) {
 +        log.creatingKeyStoreForGateway();
 +        ks.createKeystoreForGateway();
 +        char[] passphrase = null;
 +        try {
 +          passphrase = as.getGatewayIdentityPassphrase();
 +        } catch (AliasServiceException e) {
 +          throw new ServiceLifecycleException("Error accessing credential store for the gateway.", e);
 +        }
 +        if (passphrase == null) {
 +          passphrase = ms.getMasterSecret();
 +        }
 +        ks.addSelfSignedCertForGateway("gateway-identity", passphrase);
 +      }
 +      else {
 +        log.keyStoreForGatewayFoundNotCreating();
 +      }
 +      logAndValidateCertificate();
 +    } catch (KeystoreServiceException e) {
 +      throw new ServiceLifecycleException("Keystore was not loaded properly - the provided (or persisted) master secret may not match the password for the keystore.", e);
 +    }
 +
 +    keystoreType = config.getKeystoreType();
 +    sslIncludeCiphers = config.getIncludedSSLCiphers();
 +    sslExcludeCiphers = config.getExcludedSSLCiphers();
 +    sslExcludeProtocols = config.getExcludedSSLProtocols();
 +    clientAuthNeeded = config.isClientAuthNeeded();
++    clientAuthWanted = config.isClientAuthWanted();
 +    truststorePath = config.getTruststorePath();
 +    trustAllCerts = config.getTrustAllCerts();
 +    trustStoreType = config.getTruststoreType();
 +  }
 +
 +  private void logAndValidateCertificate() throws ServiceLifecycleException {
 +    // let's log the hostname (CN) and cert expiry from the gateway's public cert to aid in SSL debugging
 +    Certificate cert;
 +    try {
 +      cert = as.getCertificateForGateway("gateway-identity");
 +    } catch (AliasServiceException e) {
 +      throw new ServiceLifecycleException("Cannot Retreive Gateway SSL Certificate. Server will not start.", e);
 +    }
 +    if (cert != null) {
 +      if (cert instanceof X509Certificate) {
 +        X500Principal x500Principal = ((X509Certificate)cert).getSubjectX500Principal();
 +        X500PrincipalParser parser = new X500PrincipalParser(x500Principal);
 +        log.certificateHostNameForGateway(parser.getCN());
 +        Date notBefore = ((X509Certificate) cert).getNotBefore();
 +        Date notAfter = ((X509Certificate) cert).getNotAfter();
 +        log.certificateValidityPeriod(notBefore, notAfter);
 +        
 +        // let's not even start if the current date is not within the validity period for the SSL cert
 +        try {
 +          ((X509Certificate)cert).checkValidity();
 +        } catch (CertificateExpiredException e) {
 +          throw new ServiceLifecycleException("Gateway SSL Certificate is Expired. Server will not start.", e);
 +        } catch (CertificateNotYetValidException e) {
 +          throw new ServiceLifecycleException("Gateway SSL Certificate is not yet valid. Server will not start.", e);
 +        }
 +      }
 +      else {
 +        throw new ServiceLifecycleException("Public certificate for the gateway cannot be found with the alias gateway-identity. Plase check the identity certificate alias.");
 +      }
 +    }
 +    else {
 +      throw new ServiceLifecycleException("Public certificate for the gateway is not of the expected type of X509Certificate. Something is wrong with the gateway keystore.");
 +    }
 +  }
 +
 +  public Object buildSslContextFactory( String keystoreFileName ) throws KeyStoreException, IOException, CertificateException, NoSuchAlgorithmException {
 +    SslContextFactory sslContextFactory = new SslContextFactory( true );
 +    sslContextFactory.setCertAlias( "gateway-identity" );
 +    sslContextFactory.setKeyStoreType(keystoreType);
 +    sslContextFactory.setKeyStorePath(keystoreFileName);
 +    char[] master = ms.getMasterSecret();
 +    sslContextFactory.setKeyStorePassword(new String(master));
 +    char[] keypass = null;
 +    try {
 +      keypass = as.getGatewayIdentityPassphrase();
 +    } catch (AliasServiceException e) {
 +      // nop - default passphrase will be used
 +    }
 +    if (keypass == null) {
 +      // there has been no alias created for the key - let's assume it is the same as the keystore password
 +      keypass = master;
 +    }
 +    sslContextFactory.setKeyManagerPassword(new String(keypass));
 +
 +    String truststorePassword = null;
-     if (clientAuthNeeded) {
++    if (clientAuthNeeded || clientAuthWanted) {
 +      if (truststorePath != null) {
 +        sslContextFactory.setTrustStore(loadKeyStore(keystoreFileName, keystoreType, master));
 +        char[] truststorePwd = null;
 +        try {
 +          truststorePwd = as.getPasswordFromAliasForGateway(GATEWAY_TRUSTSTORE_PASSWORD);
 +        } catch (AliasServiceException e) {
 +          // nop - master secret will be used
 +        }
 +        if (truststorePwd != null) {
 +          truststorePassword = new String(truststorePwd);
 +        }
 +        else {
 +          truststorePassword = new String(master);
 +        }
 +        sslContextFactory.setTrustStorePassword(truststorePassword);
 +        sslContextFactory.setTrustStoreType(trustStoreType);
 +      }
 +      else {
 +        // when client auth is needed or wanted but no truststore is provided,
 +        // default to the server's keystore and details
 +        sslContextFactory.setTrustStore(loadKeyStore(keystoreFileName, keystoreType, master));
 +        sslContextFactory.setTrustStorePassword(new String(master));
 +        sslContextFactory.setTrustStoreType(keystoreType);
 +      }
 +    }
-     sslContextFactory.setNeedClientAuth( clientAuthNeeded );
++    if (clientAuthNeeded) {
++      sslContextFactory.setNeedClientAuth( clientAuthNeeded );
++    }
++    else {
++      sslContextFactory.setWantClientAuth( clientAuthWanted );
++    }
 +    sslContextFactory.setTrustAll( trustAllCerts );
 +    if (sslIncludeCiphers != null && !sslIncludeCiphers.isEmpty()) {
 +      sslContextFactory.setIncludeCipherSuites( sslIncludeCiphers.toArray(new String[sslIncludeCiphers.size()]) );
 +    }
 +    if (sslExcludeCiphers != null && !sslExcludeCiphers.isEmpty()) {
 +      sslContextFactory.setExcludeCipherSuites( sslExcludeCiphers.toArray(new String[sslExcludeCiphers.size()]) );
 +    }
 +    if (sslExcludeProtocols != null && !sslExcludeProtocols.isEmpty()) {
 +      sslContextFactory.setExcludeProtocols( sslExcludeProtocols.toArray(new String[sslExcludeProtocols.size()]) );
 +    }
 +    return sslContextFactory;
 +  }
 +  
 +  @Override
 +  public void start() throws ServiceLifecycleException {
 +    // TODO Auto-generated method stub
 +    
 +  }
 +
 +  @Override
 +  public void stop() throws ServiceLifecycleException {
 +    // TODO Auto-generated method stub
 +    
 +  }
 +
 +  private static KeyStore loadKeyStore( String fileName, String storeType, char[] storePass ) throws CertificateException, NoSuchAlgorithmException, IOException, KeyStoreException {
 +    KeyStore keystore = KeyStore.getInstance(storeType);
 +    //Coverity CID 1352655
 +    InputStream is = new FileInputStream(fileName);
 +    try {
 +      keystore.load( is, storePass );
 +    } finally {
 +      if( is != null ) {
 +        is.close();
 +      }
 +    }
 +    return keystore;
 +  }
 +
 +}
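
The client-auth handling added above follows Jetty's standard split: setNeedClientAuth
rejects any handshake without a client certificate, while setWantClientAuth only requests
one. A trimmed sketch of just that branch (keystore path and password are placeholders):

    import org.eclipse.jetty.util.ssl.SslContextFactory;

    public class ClientAuthSketch {
      static SslContextFactory build(boolean needed, boolean wanted) {
        SslContextFactory factory = new SslContextFactory(true); // trustAll, as in the service above
        factory.setKeyStorePath("/tmp/gateway.jks");             // placeholder
        factory.setKeyStorePassword("changeit");                 // placeholder
        if (needed) {
          factory.setNeedClientAuth(true);   // handshake fails without a client cert
        } else {
          factory.setWantClientAuth(wanted); // client cert is requested but optional
        }
        return factory;
      }
    }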

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityService.java
index 44f4d4b,0000000..7f52b51
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityService.java
@@@ -1,225 -1,0 +1,226 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.token.impl;
 +
 +import java.security.KeyStoreException;
 +import java.security.Principal;
 +import java.security.PublicKey;
 +import java.security.interfaces.RSAPrivateKey;
 +import java.security.interfaces.RSAPublicKey;
 +import java.util.Map;
 +import java.util.List;
 +import java.util.ArrayList;
 +
 +import javax.security.auth.Subject;
 +
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.Service;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.AliasServiceException;
 +import org.apache.knox.gateway.services.security.KeystoreService;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
++import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.services.security.token.impl.JWTToken;
 +
 +import com.nimbusds.jose.JWSSigner;
 +import com.nimbusds.jose.JWSVerifier;
 +import com.nimbusds.jose.crypto.RSASSASigner;
 +import com.nimbusds.jose.crypto.RSASSAVerifier;
 +
 +public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
 +
 +  private static final String SIGNING_KEY_PASSPHRASE = "signing.key.passphrase";
 +  private AliasService as = null;
 +  private KeystoreService ks = null;
 +  String signingKeyAlias = null;
 +
 +  public void setKeystoreService(KeystoreService ks) {
 +    this.ks = ks;
 +  }
 +
 +  public void setAliasService(AliasService as) {
 +    this.as = as;
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
 +   */
 +  @Override
-   public JWTToken issueToken(Subject subject, String algorithm) throws TokenServiceException {
++  public JWT issueToken(Subject subject, String algorithm) throws TokenServiceException {
 +    Principal p = (Principal) subject.getPrincipals().toArray()[0];
 +    return issueToken(p, algorithm);
 +  }
-   
++
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
 +   */
 +  @Override
-   public JWTToken issueToken(Principal p, String algorithm) throws TokenServiceException {
++  public JWT issueToken(Principal p, String algorithm) throws TokenServiceException {
 +    return issueToken(p, null, algorithm);
 +  }
-   
++
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long expires)
 +   */
 +  @Override
-   public JWTToken issueToken(Principal p, String algorithm, long expires) throws TokenServiceException {
++  public JWT issueToken(Principal p, String algorithm, long expires) throws TokenServiceException {
 +    return issueToken(p, (String)null, algorithm, expires);
 +  }
 +
-   public JWTToken issueToken(Principal p, String audience, String algorithm)
++  public JWT issueToken(Principal p, String audience, String algorithm)
 +      throws TokenServiceException {
 +    return issueToken(p, audience, algorithm, -1);
 +  }
 +
 +  /* (non-Javadoc)
 +   * @see org.apache.knox.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
 +   */
 +  @Override
-   public JWTToken issueToken(Principal p, String audience, String algorithm, long expires)
++  public JWT issueToken(Principal p, String audience, String algorithm, long expires)
 +      throws TokenServiceException {
 +    ArrayList<String> audiences = null;
 +    if (audience != null) {
 +      audiences = new ArrayList<String>();
 +      audiences.add(audience);
 +    }
 +    return issueToken(p, audiences, algorithm, expires);
 +  }
 +
 +  @Override
-   public JWTToken issueToken(Principal p, List<String> audiences, String algorithm, long expires)
++  public JWT issueToken(Principal p, List<String> audiences, String algorithm, long expires)
 +      throws TokenServiceException {
 +    String[] claimArray = new String[4];
 +    claimArray[0] = "KNOXSSO";
 +    claimArray[1] = p.getName();
 +    claimArray[2] = null;
 +    if (expires == -1) {
 +      claimArray[3] = null;
 +    }
 +    else {
 +      claimArray[3] = String.valueOf(expires);
 +    }
 +
 +    JWTToken token = null;
 +    if ("RS256".equals(algorithm)) {
 +      token = new JWTToken("RS256", claimArray, audiences);
 +      RSAPrivateKey key;
 +      char[] passphrase = null;
 +      try {
 +        passphrase = getSigningKeyPassphrase();
 +      } catch (AliasServiceException e) {
 +        throw new TokenServiceException(e);
 +      }
 +      try {
 +        key = (RSAPrivateKey) ks.getSigningKey(getSigningKeyAlias(),
 +            passphrase);
 +        JWSSigner signer = new RSASSASigner(key);
 +        token.sign(signer);
 +      } catch (KeystoreServiceException e) {
 +        throw new TokenServiceException(e);
 +      }
 +    }
 +    else {
 +      throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
 +    }
 +
 +    return token;
 +  }
 +
 +  private char[] getSigningKeyPassphrase() throws AliasServiceException {
 +    char[] phrase = as.getPasswordFromAliasForGateway(SIGNING_KEY_PASSPHRASE);
 +    if (phrase == null) {
 +      phrase = as.getGatewayIdentityPassphrase();
 +    }
 +    return phrase;
 +  }
 +
 +  private String getSigningKeyAlias() {
 +    if (signingKeyAlias == null) {
 +      return "gateway-identity";
 +    }
 +    return signingKeyAlias;
 +  }
 +
 +  @Override
-   public boolean verifyToken(JWTToken token)
++  public boolean verifyToken(JWT token)
 +      throws TokenServiceException {
 +    return verifyToken(token, null);
 +  }
 +
 +  @Override
-   public boolean verifyToken(JWTToken token, RSAPublicKey publicKey)
++  public boolean verifyToken(JWT token, RSAPublicKey publicKey)
 +      throws TokenServiceException {
 +    boolean rc = false;
 +    PublicKey key;
 +    try {
 +      if (publicKey == null) {
 +        key = ks.getSigningKeystore().getCertificate(getSigningKeyAlias()).getPublicKey();
 +      }
 +      else {
 +        key = publicKey;
 +      }
 +      JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) key);
 +      // TODO: interrogate the token for issuer claim in order to determine the public key to use for verification
 +      // consider jwk for specifying the key too
 +      rc = token.verify(verifier);
 +    } catch (KeyStoreException e) {
 +      throw new TokenServiceException("Cannot verify token.", e);
 +    } catch (KeystoreServiceException e) {
 +      throw new TokenServiceException("Cannot verify token.", e);
 +    }
 +    return rc;
 +  }
 +
 +  @Override
 +  public void init(GatewayConfig config, Map<String, String> options)
 +      throws ServiceLifecycleException {
 +    if (as == null || ks == null) {
 +      throw new ServiceLifecycleException("Alias or Keystore service is not set");
 +    }
 +    signingKeyAlias = config.getSigningKeyAlias();
 +
 +    @SuppressWarnings("unused")
 +    RSAPrivateKey key;
 +    char[] passphrase = null;
 +    try {
 +      passphrase = as.getPasswordFromAliasForGateway(SIGNING_KEY_PASSPHRASE);
 +      if (passphrase != null) {
 +        key = (RSAPrivateKey) ks.getSigningKey(getSigningKeyAlias(),
 +            passphrase);
 +        if (key == null) {
 +          throw new ServiceLifecycleException("Provisioned passphrase cannot be used to acquire signing key.");
 +        }
 +      }
 +    } catch (AliasServiceException e) {
 +      throw new ServiceLifecycleException("Provisioned signing key passphrase cannot be acquired.", e);
 +    } catch (KeystoreServiceException e) {
 +      throw new ServiceLifecycleException("Provisioned signing key passphrase cannot be acquired.", e);
 +    }
 +  }
 +
 +  @Override
 +  public void start() throws ServiceLifecycleException {
 +  }
 +
 +  @Override
 +  public void stop() throws ServiceLifecycleException {
 +  }
 +}
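
The RS256 signing and verification that DefaultTokenAuthorityService performs against the
gateway keystore reduces to a standard Nimbus JOSE round trip. A self-contained sketch
using a throwaway key pair in place of the keystore:

    import java.security.KeyPair;
    import java.security.KeyPairGenerator;
    import java.security.interfaces.RSAPrivateKey;
    import java.security.interfaces.RSAPublicKey;

    import com.nimbusds.jose.JWSAlgorithm;
    import com.nimbusds.jose.JWSHeader;
    import com.nimbusds.jose.JWSObject;
    import com.nimbusds.jose.Payload;
    import com.nimbusds.jose.crypto.RSASSASigner;
    import com.nimbusds.jose.crypto.RSASSAVerifier;

    public class Rs256Sketch {
      public static void main(String[] args) throws Exception {
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
        kpg.initialize(2048);
        KeyPair kp = kpg.generateKeyPair();

        // Sign, as token.sign(signer) does above.
        JWSObject jws = new JWSObject(new JWSHeader(JWSAlgorithm.RS256),
                                      new Payload("{\"sub\":\"guest\"}"));
        jws.sign(new RSASSASigner((RSAPrivateKey) kp.getPrivate()));

        // Verify, as token.verify(verifier) does above.
        JWSObject parsed = JWSObject.parse(jws.serialize());
        boolean ok = parsed.verify(new RSASSAVerifier((RSAPublicKey) kp.getPublic()));
        System.out.println("verified: " + ok);
      }
    }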


[22/22] knox git commit: KNOX-998 - Merging from current master

Posted by mo...@apache.org.
KNOX-998 - Merging from current master


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/668aea18
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/668aea18
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/668aea18

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 668aea18059d18b6a9b3e9ade16e734b853756cd
Parents: b3107e9
Author: Sandeep More <mo...@apache.org>
Authored: Mon Sep 25 16:19:52 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Sep 25 16:19:52 2017 -0400

----------------------------------------------------------------------
 .../discovery/ambari/AmbariCluster.java         | 114 ---
 .../discovery/ambari/AmbariComponent.java       |  76 --
 .../ambari/AmbariServiceDiscovery.java          | 291 -------
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 --
 .../ambari/AmbariServiceDiscoveryType.java      |  35 -
 .../ambari/AmbariServiceURLCreator.java         | 184 ----
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 -
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 -------------------
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 .../federation/AbstractJWTFilterTest.java       |   2 +-
 .../DefaultServiceDiscoveryConfig.java          |  48 --
 .../discovery/ServiceDiscoveryFactory.java      |  81 --
 .../topology/simple/SimpleDescriptor.java       |  46 -
 .../simple/SimpleDescriptorFactory.java         |  71 --
 .../simple/SimpleDescriptorHandler.java         | 186 ----
 .../topology/simple/SimpleDescriptorImpl.java   | 111 ---
 .../simple/SimpleDescriptorMessages.java        |  44 -
 .../DefaultServiceDiscoveryConfig.java          |  48 ++
 .../discovery/ServiceDiscoveryFactory.java      |  81 ++
 .../topology/simple/SimpleDescriptor.java       |  46 +
 .../simple/SimpleDescriptorFactory.java         |  71 ++
 .../simple/SimpleDescriptorHandler.java         | 187 ++++
 .../topology/simple/SimpleDescriptorImpl.java   | 111 +++
 .../simple/SimpleDescriptorMessages.java        |  44 +
 .../impl/DefaultTokenAuthorityServiceTest.java  |  16 +-
 .../PropertiesFileServiceDiscoveryTest.java     |  90 --
 .../discovery/ServiceDiscoveryFactoryTest.java  |  81 --
 .../test/extension/DummyServiceDiscovery.java   |  66 --
 .../extension/DummyServiceDiscoveryType.java    |  32 -
 .../PropertiesFileServiceDiscovery.java         | 108 ---
 .../PropertiesFileServiceDiscoveryType.java     |  35 -
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 -
 .../extension/SneakyServiceDiscoveryType.java   |  33 -
 .../simple/SimpleDescriptorFactoryTest.java     | 218 -----
 .../simple/SimpleDescriptorHandlerTest.java     | 239 ------
 .../topology/DefaultTopologyServiceTest.java    |  12 +-
 .../PropertiesFileServiceDiscoveryTest.java     |  89 ++
 .../discovery/ServiceDiscoveryFactoryTest.java  |  80 ++
 .../test/extension/DummyServiceDiscovery.java   |  66 ++
 .../extension/DummyServiceDiscoveryType.java    |  32 +
 .../PropertiesFileServiceDiscovery.java         | 108 +++
 .../PropertiesFileServiceDiscoveryType.java     |  36 +
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 +
 .../extension/SneakyServiceDiscoveryType.java   |  33 +
 .../simple/SimpleDescriptorFactoryTest.java     | 218 +++++
 .../simple/SimpleDescriptorHandlerTest.java     | 239 ++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  21 -
 ...eway.topology.discovery.ServiceDiscoveryType |  21 +
 .../topology/file/ambari-cluster-policy.xml     |  74 --
 .../topology/file/simple-topology-four.json     |  18 -
 .../topology/file/ambari-cluster-policy.xml     |  74 ++
 .../topology/file/simple-topology-four.json     |  18 +
 .../topology/discovery/GatewayService.java      |  29 -
 .../topology/discovery/ServiceDiscovery.java    |  76 --
 .../discovery/ServiceDiscoveryConfig.java       |  42 -
 .../discovery/ServiceDiscoveryType.java         |  40 -
 .../topology/discovery/GatewayService.java      |  29 +
 .../topology/discovery/ServiceDiscovery.java    |  76 ++
 .../discovery/ServiceDiscoveryConfig.java       |  42 +
 .../discovery/ServiceDiscoveryType.java         |  40 +
 67 files changed, 3500 insertions(+), 3500 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
deleted file mode 100644
index 6eaabd3..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-class AmbariCluster implements ServiceDiscovery.Cluster {
-
-    private String name = null;
-
-    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
-
-    private Map<String, AmbariComponent> components = null;
-
-
-    AmbariCluster(String name) {
-        this.name = name;
-        components = new HashMap<String, AmbariComponent>();
-    }
-
-    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
-        if (!serviceConfigurations.keySet().contains(serviceName)) {
-            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
-        }
-        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
-    }
-
-
-    void addComponent(AmbariComponent component) {
-        components.put(component.getName(), component);
-    }
-
-
-    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
-        ServiceConfiguration sc = null;
-        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
-        if (configs != null) {
-            sc = configs.get(configurationType);
-        }
-        return sc;
-    }
-
-
-    Map<String, AmbariComponent> getComponents() {
-        return components;
-    }
-
-
-    AmbariComponent getComponent(String name) {
-        return components.get(name);
-    }
-
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-
-    @Override
-    public List<String> getServiceURLs(String serviceName) {
-        List<String> urls = new ArrayList<>();
-        urls.addAll(urlCreator.create(this, serviceName));
-        return urls;
-    }
-
-
-    static class ServiceConfiguration {
-
-        private String type;
-        private String version;
-        private Map<String, String> props;
-
-        ServiceConfiguration(String type, String version, Map<String, String> properties) {
-            this.type = type;
-            this.version = version;
-            this.props = properties;
-        }
-
-        public String getVersion() {
-            return version;
-        }
-
-        public String getType() {
-            return type;
-        }
-
-        public Map<String, String> getProperties() {
-            return props;
-        }
-    }
-
-}
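
For orientation while reading the package move: the discovery code assembles this model
roughly as sketched below. The constructors are package-private, so such code would sit
in the same package; the host name and property values here are placeholders.

    package org.apache.hadoop.gateway.topology.discovery.ambari;

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class ClusterAssemblySketch {
      public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("dfs.namenode.http-address", "nn.example.com:50070"); // placeholder
        AmbariCluster cluster = new AmbariCluster("Sandbox");
        cluster.addServiceConfiguration("HDFS", "hdfs-site",
            new AmbariCluster.ServiceConfiguration("hdfs-site", "1", props));
        cluster.addComponent(new AmbariComponent("NAMENODE", "1", "Sandbox", "HDFS",
            Arrays.asList("nn.example.com"), props));
        System.out.println(
            cluster.getComponent("NAMENODE").getConfigProperty("dfs.namenode.http-address"));
      }
    }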

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
deleted file mode 100644
index 55257fb..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import java.util.List;
-import java.util.Map;
-
-class AmbariComponent {
-
-    private String clusterName = null;
-    private String serviceName = null;
-    private String name        = null;
-    private String version     = null;
-
-    private List<String> hostNames = null;
-
-    private Map<String, String> properties = null;
-
-    AmbariComponent(String              name,
-                    String              version,
-                    String              cluster,
-                    String              service,
-                    List<String>        hostNames,
-                    Map<String, String> properties) {
-        this.name = name;
-        this.serviceName = service;
-        this.clusterName = cluster;
-        this.version = version;
-        this.hostNames = hostNames;
-        this.properties = properties;
-    }
-
-    public String getVersion() {
-        return version;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public String getServiceName() {
-        return serviceName;
-    }
-
-    public String getClusterName() {
-        return clusterName;
-    }
-
-    public List<String> getHostNames() {
-        return hostNames;
-    }
-
-    public Map<String, String> getConfigProperties() {
-        return properties;
-    }
-
-    public String getConfigProperty(String propertyName) {
-        return properties.get(propertyName);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
deleted file mode 100644
index 34f20a7..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONArray;
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.config.ConfigurationException;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-import org.apache.hadoop.gateway.topology.discovery.GatewayService;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.util.EntityUtils;
-
-import java.io.IOException;
-import java.util.*;
-
-
-class AmbariServiceDiscovery implements ServiceDiscovery {
-
-    static final String TYPE = "AMBARI";
-
-    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
-
-    static final String AMBARI_HOSTROLES_URI =
-                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
-
-    static final String AMBARI_SERVICECONFIGS_URI =
-            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
-
-    // Map of component names to service configuration types
-    private static Map<String, String> componentServiceConfigs = new HashMap<>();
-    static {
-        componentServiceConfigs.put("NAMENODE", "hdfs-site");
-        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
-        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
-        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
-        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
-        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
-    } // TODO: Are there other service components, for which the endpoints can be discovered via Ambari?
-
-    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
-    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
-
-    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-    @GatewayService
-    private AliasService aliasService;
-
-    private CloseableHttpClient httpClient = null;
-
-    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
-
-
-    AmbariServiceDiscovery() {
-        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
-    }
-
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-
-    @Override
-    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
-
-        String discoveryAddress = config.getAddress();
-
-        // Invoke Ambari REST API to discover the available clusters
-        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
-
-        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
-
-        // Parse the cluster names from the response, and perform the cluster discovery
-        JSONArray clusterItems = (JSONArray) json.get("items");
-        for (Object clusterItem : clusterItems) {
-            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
-            try {
-                Cluster c = discover(config, clusterName);
-                clusters.put(clusterName, c);
-            } catch (Exception e) {
-                log.clusterDiscoveryError(clusterName, e);
-            }
-        }
-
-        return clusters;
-    }
-
-
-    @Override
-    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-        AmbariCluster cluster = new AmbariCluster(clusterName);
-
-        Map<String, String> serviceComponents = new HashMap<>();
-
-        String discoveryAddress = config.getAddress();
-        String discoveryUser = config.getUser();
-        String discoveryPwdAlias = config.getPasswordAlias();
-
-        Map<String, List<String>> componentHostNames = new HashMap<>();
-        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
-        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
-        if (hostRolesJSON != null) {
-            // Process the host roles JSON
-            JSONArray items = (JSONArray) hostRolesJSON.get("items");
-            for (Object obj : items) {
-                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
-                for (Object component : components) {
-                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
-                    for (Object hostComponent : hostComponents) {
-                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
-                        String serviceName = (String) hostRoles.get("service_name");
-                        String componentName = (String) hostRoles.get("component_name");
-
-                        serviceComponents.put(componentName, serviceName);
-
-//                    String hostName = (String) hostRoles.get("host_name");
-                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
-                        log.discoveredServiceHost(serviceName, hostName);
-                        if (!componentHostNames.containsKey(componentName)) {
-                            componentHostNames.put(componentName, new ArrayList<String>());
-                        }
-                        componentHostNames.get(componentName).add(hostName);
-                    }
-                }
-            }
-        }
-
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
-                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
-        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
-        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
-        if (serviceConfigsJSON != null) {
-            // Process the service configurations
-            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
-            for (Object serviceConfig : serviceConfigs) {
-                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
-                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
-                for (Object configuration : configurations) {
-                    String configType = (String) ((JSONObject) configuration).get("type");
-                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
-
-                    Map<String, String> configProps = new HashMap<String, String>();
-                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
-                    for (String propertyName : configProperties.keySet()) {
-                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
-                    }
-                    if (!serviceConfigurations.containsKey(serviceName)) {
-                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
-                    }
-                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                }
-            }
-        }
-
-        // Construct the AmbariCluster model
-        for (String componentName : serviceComponents.keySet()) {
-            String serviceName = serviceComponents.get(componentName);
-            List<String> hostNames = componentHostNames.get(componentName);
-
-            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
-            String configType = componentServiceConfigs.get(componentName);
-            if (configType != null) {
-                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
-                AmbariComponent c = new AmbariComponent(componentName,
-                                                        svcConfig.getVersion(),
-                                                        clusterName,
-                                                        serviceName,
-                                                        hostNames,
-                                                        svcConfig.getProperties());
-                cluster.addComponent(c);
-            }
-        }
-
-        return cluster;
-    }
-
-
-    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-        JSONObject result = null;
-
-        CloseableHttpResponse response = null;
-        try {
-            HttpGet request = new HttpGet(url);
-
-            // If no configured username, then use default username alias
-            String password = null;
-            if (username == null) {
-                if (aliasService != null) {
-                    try {
-                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
-                        if (defaultUser != null) {
-                            username = new String(defaultUser);
-                        }
-                    } catch (AliasServiceException e) {
-                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
-                    }
-                }
-
-                // If username is still null
-                if (username == null) {
-                    log.aliasServiceUserNotFound();
-                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
-                }
-            }
-
-            if (aliasService != null) {
-                // If no password alias is configured, then try the default alias
-                if (passwordAlias == null) {
-                    passwordAlias = DEFAULT_PWD_ALIAS;
-                }
-                try {
-                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
-                    if (pwd != null) {
-                        password = new String(pwd);
-                    }
-
-                } catch (AliasServiceException e) {
-                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
-                }
-            }
-
-            // If the password could not be determined
-            if (password == null) {
-                log.aliasServicePasswordNotFound();
-                throw new ConfigurationException("No password is configured for Ambari service discovery.");
-            }
-
-            // Add an auth header if credentials are available
-            String encodedCreds =
-                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
-            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
-
-            response = httpClient.execute(request);
-
-            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
-                HttpEntity entity = response.getEntity();
-                if (entity != null) {
-                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
-                    log.debugJSON(result.toJSONString());
-                } else {
-                    log.noJSON(url);
-                }
-            } else {
-                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
-            }
-
-        } catch (IOException e) {
-            log.restInvocationError(url, e);
-        } finally {
-            if(response != null) {
-                try {
-                    response.close();
-                } catch (IOException e) {
-                    // Ignore
-                }
-            }
-        }
-        return result;
-    }
-
-
-}
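
The block above (removed here and re-added under the org.apache.knox package later in this diff) walks Ambari's service_config_versions response and indexes it as service name -> configuration type -> properties. A minimal standalone sketch of the same traversal, using the json-smart (net.minidev.json) API the patch already depends on; the class name is hypothetical and Java 8 is assumed for computeIfAbsent:

    import net.minidev.json.JSONArray;
    import net.minidev.json.JSONObject;
    import net.minidev.json.JSONValue;

    import java.util.HashMap;
    import java.util.Map;

    public class ServiceConfigIndexSketch {

        // Returns: service name -> config type (e.g. "hdfs-site") -> flattened properties.
        public static Map<String, Map<String, Map<String, String>>> index(String json) {
            Map<String, Map<String, Map<String, String>>> byService = new HashMap<>();
            JSONObject root = (JSONObject) JSONValue.parse(json);
            JSONArray items = (JSONArray) root.get("items");
            for (Object item : items) {
                JSONObject serviceConfig = (JSONObject) item;
                String serviceName = (String) serviceConfig.get("service_name");
                JSONArray configurations = (JSONArray) serviceConfig.get("configurations");
                for (Object c : configurations) {
                    JSONObject configuration = (JSONObject) c;
                    String configType = (String) configuration.get("type");
                    JSONObject properties = (JSONObject) configuration.get("properties");
                    // Stringify every value; Ambari may return numbers or booleans.
                    Map<String, String> props = new HashMap<>();
                    for (String name : properties.keySet()) {
                        props.put(name, String.valueOf(properties.get(name)));
                    }
                    byService.computeIfAbsent(serviceName, k -> new HashMap<>())
                             .put(configType, props);
                }
            }
            return byService;
        }
    }

computeIfAbsent replaces the containsKey/put guard used in the patch; the two are equivalent here.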

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
deleted file mode 100644
index caa16ed..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.gateway.topology.discovery.ambari")
-public interface AmbariServiceDiscoveryMessages {
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error during cluster {0} discovery: {1}")
-    void clusterDiscoveryError(final String clusterName,
-                               @StackTrace(level = MessageLevel.ERROR) Exception e);
-
-
-    @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation {0} failed: {1}")
-    void restInvocationError(final String url,
-                             @StackTrace(level = MessageLevel.ERROR) Exception e);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
-    void aliasServiceUserError(final String alias, final String error);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
-    void aliasServicePasswordError(final String alias, final String error);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No user configured for Ambari service discovery.")
-    void aliasServiceUserNotFound();
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No password configured for Ambari service discovery.")
-    void aliasServicePasswordNotFound();
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Unexpected REST invocation response code for {0} : {1}")
-    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "REST invocation {0} yielded a response without any JSON.")
-    void noJSON(final String url);
-
-
-    @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation result: {0}")
-    void debugJSON(final String json);
-
-
-    @Message(level = MessageLevel.INFO,
-            text = "Discovered: Service: {0}, Host: {1}")
-    void discoveredServiceHost(final String serviceName, final String hostName);
-
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
deleted file mode 100644
index 723a786..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-
-public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
-
-    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
-
-    @Override
-    public String getType() {
-        return AmbariServiceDiscovery.TYPE;
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-        return new AmbariServiceDiscovery();
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
deleted file mode 100644
index 0674642..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-import java.util.ArrayList;
-import java.util.List;
-
-class AmbariServiceURLCreator {
-
-    private static final String NAMENODE_SERVICE        = "NAMENODE";
-    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
-    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
-    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
-    private static final String OOZIE_SERVICE           = "OOZIE";
-    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
-    private static final String HIVE_SERVICE            = "HIVE";
-    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
-
-
-    /**
-     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
-     *
-     * @param cluster The cluster discovery results
-     * @param serviceName The name of a Hadoop service
-     *
-     * @return One or more endpoint URLs for the specified service.
-     */
-    public List<String> create(AmbariCluster cluster, String serviceName) {
-        List<String> result = null;
-
-        if (NAMENODE_SERVICE.equals(serviceName)) {
-            result = createNameNodeURL(cluster);
-        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
-            result = createJobTrackerURL(cluster);
-        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
-            result = createWebHDFSURL(cluster);
-        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
-            result = createWebHCatURL(cluster);
-        } else if (OOZIE_SERVICE.equals(serviceName)) {
-            result = createOozieURL(cluster);
-        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
-            result = createWebHBaseURL(cluster);
-        } else if (HIVE_SERVICE.equals(serviceName)) {
-            result = createHiveURL(cluster);
-        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
-            result = createResourceManagerURL(cluster);
-        }
-
-        return result;
-    }
-
-
-    private List<String> createNameNodeURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("NAMENODE");
-        if (comp != null) {
-            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createJobTrackerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
-        if (comp != null) {
-            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHDFSURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
-        if (sc != null) {
-            String address = sc.getProperties().get("dfs.namenode.http-address");
-            result.add("http://" + address + "/webhdfs");
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHCatURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
-        if (webhcat != null) {
-            String port = webhcat.getConfigProperty("templeton.port");
-            String host = webhcat.getHostNames().get(0);
-
-            result.add("http://" + host + ":" + port + "/templeton");
-        }
-        return result;
-    }
-
-
-    private List<String> createOozieURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
-        if (comp != null) {
-            result.add(comp.getConfigProperty("oozie.base.url"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHBaseURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
-        if (comp != null) {
-            for (String host : comp.getHostNames()) {
-                result.add("http://" + host + ":60080");
-            }
-        }
-
-        return result;
-    }
-
-
-    private List<String> createHiveURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
-        if (hive != null) {
-            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
-            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
-            String transport = hive.getConfigProperty("hive.server2.transport.mode");
-            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
-            String host = hive.getHostNames().get(0);
-
-            String scheme = null; // What is the scheme for the binary transport mode?
-            if ("http".equals(transport)) {
-                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
-            }
-
-            result.add(scheme + "://" + host + ":" + port + "/" + path);
-        }
-        return result;
-    }
-
-
-    private List<String> createResourceManagerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
-        if (resMan != null) {
-            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
-            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
-            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
-
-            result.add(scheme + "://" + webappAddress + "/ws");
-        }
-
-        return result;
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..fa9d710
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.keySet().contains(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}
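
AmbariCluster is a plain in-memory model: one map of components plus a two-level map of service configurations keyed by service name and configuration type. A hedged usage sketch, written as if from inside the same package (the class and its constructor are package-private); the cluster name and property value are illustrative only:

    Map<String, String> props = new HashMap<>();
    props.put("dfs.namenode.http-address", "namenode.example.com:50070"); // illustrative value

    AmbariCluster cluster = new AmbariCluster("Sandbox"); // hypothetical cluster name
    cluster.addServiceConfiguration("HDFS", "hdfs-site",
        new AmbariCluster.ServiceConfiguration("hdfs-site", "1", props));

    AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
    // Prints "namenode.example.com:50070"
    System.out.println(sc.getProperties().get("dfs.namenode.http-address"));

getServiceURLs() delegates to AmbariServiceURLCreator (re-added later in this diff), so the model itself stays free of URL-construction logic.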

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..4750e7e
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}
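
AmbariComponent stores the hostNames list and properties map it is handed by reference, so anything that mutates those collections after construction also mutates the discovery results. A hypothetical hardening, not part of this patch, would hand out read-only views instead:

    // Hypothetical variant of the accessors above (not in the patch);
    // java.util.Collections would need to be imported.
    public List<String> getHostNames() {
        return Collections.unmodifiableList(hostNames);
    }

    public Map<String, String> getConfigProperties() {
        return Collections.unmodifiableMap(properties);
    }

Whether that guarantee is worth the extra wrapper objects is a judgment call for a package-private holder like this one.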

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..da03564
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.knox.gateway.config.ConfigurationException;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+import org.apache.knox.gateway.topology.discovery.GatewayService;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which the endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // Parse the cluster names from the response, and perform the cluster discovery
+        JSONArray clusterItems = (JSONArray) json.get("items");
+        for (Object clusterItem : clusterItems) {
+            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+            try {
+                Cluster c = discover(config, clusterName);
+                clusters.put(clusterName, c);
+            } catch (Exception e) {
+                log.clusterDiscoveryError(clusterName, e);
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+//                    String hostName = (String) hostRoles.get("host_name");
+                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<String, String>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null) {
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                AmbariComponent c = new AmbariComponent(componentName,
+                                                        svcConfig.getVersion(),
+                                                        clusterName,
+                                                        serviceName,
+                                                        hostNames,
+                                                        svcConfig.getProperties());
+                cluster.addComponent(c);
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no username is configured, use the default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
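
invokeREST() sends every request with a preemptive HTTP Basic Authorization header instead of relying on HttpClient's challenge/response handling, which saves a round trip against Ambari since Ambari always demands credentials. A minimal standalone sketch of the same pattern, using the same Apache HttpClient 4.x and commons-codec calls the method above uses; the class and method names are hypothetical:

    import org.apache.commons.codec.binary.Base64;
    import org.apache.http.HttpEntity;
    import org.apache.http.HttpStatus;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.message.BasicHeader;
    import org.apache.http.util.EntityUtils;

    import java.io.IOException;

    public class BasicAuthGetSketch {

        // One GET with a preemptive Basic auth header; returns the body on HTTP 200, else null.
        public static String get(String url, String username, String password) throws IOException {
            String encodedCreds = Base64.encodeBase64String((username + ":" + password).getBytes());
            HttpGet request = new HttpGet(url);
            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));

            try (CloseableHttpClient client = HttpClients.createDefault();
                 CloseableHttpResponse response = client.execute(request)) {
                if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                    HttpEntity entity = response.getEntity();
                    return (entity != null) ? EntityUtils.toString(entity) : null;
                }
                return null;
            }
        }
    }

Unlike this sketch, the production method keeps a single CloseableHttpClient for the life of the discovery object, the cheaper choice when many REST calls are made per discovery pass.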

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..2a153bb
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+
+
+
+}
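
The interface above is never implemented by hand: Knox's i18n framework generates the implementation, and each method call emits its templated text at the declared level. Usage mirrors AmbariServiceDiscovery earlier in this diff (the host name below is illustrative):

    import org.apache.knox.gateway.i18n.messages.MessagesFactory;

    public class DiscoveryLoggingSketch {

        private static final AmbariServiceDiscoveryMessages LOG =
            MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

        public static void main(String[] args) {
            // Logs at INFO: "Discovered: Service: HDFS, Host: namenode.example.com"
            LOG.discoveredServiceHost("HDFS", "namenode.example.com");
        }
    }

One observation: the @Messages logger name ("org.apache.gateway.topology.discovery.ambari") matches neither the old org.apache.hadoop prefix nor the new org.apache.knox one, which looks like a pre-existing typo carried through the move.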

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..23d11e0
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}
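
AmbariServiceDiscoveryType is the SPI entry point for this module; the META-INF/services registration just below suggests it is resolved through java.util.ServiceLoader. (The private IMPL constant is declared but never referenced in the class above.) A hedged sketch of how a caller might resolve a discovery type by name; the lookup class is hypothetical and Knox's real factory may differ:

    import java.util.ServiceLoader;

    import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;

    public class DiscoveryTypeLookupSketch {

        // Scan all registered providers and instantiate the first matching type.
        public static ServiceDiscovery forType(String type) {
            for (ServiceDiscoveryType t : ServiceLoader.load(ServiceDiscoveryType.class)) {
                if (t.getType().equalsIgnoreCase(type)) {
                    return t.newInstance();
                }
            }
            return null; // unknown discovery type
        }
    }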

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..302eda7
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return One or more endpoint URLs for the specified service.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            String scheme = null; // What is the scheme for the binary transport mode?
+            if ("http".equals(transport)) {
+                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
+            }
+
+            result.add(scheme + "://" + host + ":" + port + "/" + path);
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
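
One detail worth calling out in createHiveURL(): when hive.server2.transport.mode is anything other than "http" (for example "binary"), scheme stays null and the method still emits a URL, which will literally begin with "null://". The in-code question ("What is the scheme for the binary transport mode?") acknowledges the gap. A hypothetical guard, not part of this patch, that simply skips URL creation in that case:

    String scheme = null;
    if ("http".equals(transport)) {
        scheme = Boolean.parseBoolean(useSSL) ? "https" : "http";
    }
    if (scheme != null) {
        // Only emit a URL for transport modes we know how to address.
        result.add(scheme + "://" + host + ":" + port + "/" + path);
    }

Boolean.parseBoolean is used here in place of Boolean.valueOf purely to avoid the boxing; the two behave identically for this check.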

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
deleted file mode 100644
index 1da4fc9..0000000
--- a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..0c232ad
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
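
The file name under META-INF/services must be the fully qualified name of the interface, and its single content line the fully qualified name of the implementation, which is why the package restructuring has to rename both in lockstep (the "\ No newline at end of file" marker is diff bookkeeping, not file content). A hypothetical smoke check that the renamed provider is still discoverable after the move:

    import java.util.ServiceLoader;

    import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;

    public class RegistrationSmokeCheck {

        public static void main(String[] args) {
            boolean found = false;
            for (ServiceDiscoveryType t : ServiceLoader.load(ServiceDiscoveryType.class)) {
                found |= "AMBARI".equals(t.getType());
            }
            System.out.println("AMBARI discovery registered: " + found);
        }
    }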


[05/22] knox git commit: KNOX-1046 - Add Client Cert Wanted Capability with Configurable Validation that Checks for It

Posted by mo...@apache.org.
KNOX-1046 - Add Client Cert Wanted Capability with Configurable Validation that Checks for It

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5432c872
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5432c872
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5432c872

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 5432c872271e42d1ba8981e5f5de2059d5509ba2
Parents: 8537d42
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Sep 22 13:40:18 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Sep 22 13:40:31 2017 -0400

----------------------------------------------------------------------
 .../hadoop/gateway/config/impl/GatewayConfigImpl.java    | 10 ++++++++++
 .../gateway/services/security/impl/JettySSLService.java  | 11 +++++++++--
 .../org/apache/hadoop/gateway/config/GatewayConfig.java  |  2 ++
 .../org/apache/hadoop/gateway/GatewayTestConfig.java     |  5 +++++
 4 files changed, 26 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 3b7d19e..0956a4a 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -118,6 +118,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
   private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
   private static final String CLIENT_AUTH_NEEDED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.needed";
+  private static final String CLIENT_AUTH_WANTED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.wanted";
   private static final String TRUSTSTORE_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.path";
   private static final String TRUSTSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.type";
   private static final String KEYSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".keystore.type";
@@ -535,6 +536,15 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   }
 
   /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#isClientAuthWanted()
+   */
+  @Override
+  public boolean isClientAuthWanted() {
+    String clientAuthWanted = get( CLIENT_AUTH_WANTED, "false" );
+    return "true".equals(clientAuthWanted);
+  }
+
+  /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.config.GatewayConfig#getTruststorePath()
    */
   @Override

http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
index ac4bfa3..52c06d9 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
@@ -63,6 +63,7 @@ public class JettySSLService implements SSLService {
   private String truststorePath;
   private String keystoreType;
   private String trustStoreType;
+  private boolean clientAuthWanted;
 
   public void setMasterService(MasterService ms) {
     this.ms = ms;
@@ -126,6 +127,7 @@ public class JettySSLService implements SSLService {
     sslExcludeCiphers = config.getExcludedSSLCiphers();
     sslExcludeProtocols = config.getExcludedSSLProtocols();
     clientAuthNeeded = config.isClientAuthNeeded();
+    clientAuthWanted = config.isClientAuthWanted();
     truststorePath = config.getTruststorePath();
     trustAllCerts = config.getTrustAllCerts();
     trustStoreType = config.getTruststoreType();
@@ -186,7 +188,7 @@ public class JettySSLService implements SSLService {
     sslContextFactory.setKeyManagerPassword(new String(keypass));
 
     String truststorePassword = null;
-    if (clientAuthNeeded) {
+    if (clientAuthNeeded || clientAuthWanted) {
       if (truststorePath != null) {
         sslContextFactory.setTrustStore(loadKeyStore(keystoreFileName, keystoreType, master));
         char[] truststorePwd = null;
@@ -212,7 +214,12 @@ public class JettySSLService implements SSLService {
         sslContextFactory.setTrustStoreType(keystoreType);
       }
     }
-    sslContextFactory.setNeedClientAuth( clientAuthNeeded );
+    if (clientAuthNeeded) {
+      sslContextFactory.setNeedClientAuth( clientAuthNeeded );
+    }
+    else {
+      sslContextFactory.setWantClientAuth( clientAuthWanted );
+    }
     sslContextFactory.setTrustAll( trustAllCerts );
     if (sslIncludeCiphers != null && !sslIncludeCiphers.isEmpty()) {
       sslContextFactory.setIncludeCipherSuites( sslIncludeCiphers.toArray(new String[sslIncludeCiphers.size()]) );
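
The hunk above hinges on the difference between Jetty's two client-certificate modes: setNeedClientAuth rejects any TLS handshake that does not present a client certificate, while setWantClientAuth requests a certificate during the handshake but still accepts connections without one. A minimal sketch of the same decision in isolation (the helper name and parameters are stand-ins for the gateway's configuration values, assuming the Jetty 9 era SslContextFactory API used here):

    import org.eclipse.jetty.util.ssl.SslContextFactory;

    public class ClientAuthModes {
        // "needed" takes precedence; "wanted" merely requests a certificate.
        static void configureClientAuth(SslContextFactory factory,
                                        boolean clientAuthNeeded,
                                        boolean clientAuthWanted) {
            if (clientAuthNeeded) {
                factory.setNeedClientAuth(true);    // handshake fails without a client cert
            } else {
                factory.setWantClientAuth(clientAuthWanted); // cert requested but optional
            }
        }
    }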

http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
index 506c31e..66fb83c 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
@@ -108,6 +108,8 @@ public interface GatewayConfig {
 
   boolean isClientAuthNeeded();
 
+  boolean isClientAuthWanted();
+
   String getTruststorePath();
 
   boolean getTrustAllCerts();

http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 09b0d94..ff9a877 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -609,4 +609,9 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
   public boolean isGatewayServerHeaderEnabled() {
 	return false;
   }
+
+  @Override
+  public boolean isClientAuthWanted() {
+    return false;
+  }
 }
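
For operators, the new flag sits alongside the existing client.auth.needed property. A hedged example of enabling it in the gateway configuration file (typically gateway-site.xml), assuming the usual "gateway" value for GATEWAY_CONFIG_FILE_PREFIX; the prefix constant's value is not shown in this diff, so verify the resolved property name against GatewayConfigImpl:

    <!-- gateway-site.xml: request, but do not require, a client certificate -->
    <property>
        <name>gateway.client.auth.wanted</name>
        <value>true</value>
    </property>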


[02/22] knox git commit: KNOX-1036 - Fix a number of issues relating to JWTokenAuthority

Posted by mo...@apache.org.
KNOX-1036 - Fix a number of issues relating to JWTokenAuthority

Signed-off-by: Colm O hEigeartaigh <co...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c833bf90
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c833bf90
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c833bf90

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: c833bf907566301e525f514354dcb0325f5e0738
Parents: d3f507f
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Sep 20 11:26:33 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Thu Sep 21 15:42:41 2017 +0100

----------------------------------------------------------------------
 .../filter/JWTAccessTokenAssertionFilter.java   | 23 ++++++------
 .../jwt/filter/JWTAuthCodeAssertionFilter.java  | 16 ++++----
 .../federation/AbstractJWTFilterTest.java       | 19 +++++-----
 .../impl/DefaultTokenAuthorityService.java      | 21 ++++++-----
 .../service/knoxsso/WebSSOResourceTest.java     | 14 +++----
 .../knoxtoken/TokenServiceResourceTest.java     | 14 +++----
 .../security/token/JWTokenAuthority.java        | 19 +++++-----
 .../services/security/token/impl/JWT.java       | 39 +++++++++++---------
 .../services/security/token/impl/JWTToken.java  | 27 +++++++-------
 9 files changed, 97 insertions(+), 95 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
index f8d9a02..e2ef32e 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.apache.hadoop.gateway.util.JsonUtils;
 
@@ -66,12 +67,12 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
     authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
     sr = (ServiceRegistry) services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
   }
-  
+
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain chain) throws IOException, ServletException {
     String jsonResponse = null;
-    
+
     String header = ((HttpServletRequest) request).getHeader("Authorization");
     if (header != null && header.startsWith(BEARER)) {
       // what follows the bearer designator should be the JWT token being used to request or as an access token
@@ -94,7 +95,7 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
       else {
         throw new ServletException("Expected JWT Token not provided as Bearer token");
       }
-      
+
       // authorization of the user for the requested service (and resource?) should have been done by
       // the JWTFederationFilter - once we get here we can assume that it is authorized and we just need
       // to assert the identity via an access token
@@ -102,27 +103,27 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
       Subject subject = Subject.getSubject(AccessController.getContext());
       String principalName = getPrincipalName(subject);
       principalName = mapper.mapUserPrincipal(principalName);
-      
+
       // calculate expiration timestamp: validity * 1000 + currentTimeInMillis
       long expires = System.currentTimeMillis() + validity * 1000;
-      
+
       String serviceName = request.getParameter("service-name");
       String clusterName = request.getParameter("cluster-name");
       String accessToken = getAccessToken(principalName, serviceName, expires);
-      
+
       String serviceURL = sr.lookupServiceURL(clusterName, serviceName);
-      
+
       HashMap<String, Object> map = new HashMap<>();
       // TODO: populate map from JWT authorization code
       map.put(ACCESS_TOKEN, accessToken);
       map.put(TOKEN_TYPE, BEARER);
       map.put(EXPIRES_IN, expires);
-      
+
       // TODO: this url needs to be rewritten when in gateway deployments....
       map.put(SVC_URL, serviceURL);
-      
+
       jsonResponse = JsonUtils.renderAsJsonString(map);
-      
+
       response.getWriter().write(jsonResponse);
       //KNOX-685: response.getWriter().flush();
       return; // break filter chain
@@ -147,7 +148,7 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
         return principalName;
       }
     };
-    JWTToken token = null;
+    JWT token = null;
     try {
       token = authority.issueToken(p, serviceName, "RS256", expires);
       // Coverity CID 1327961

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
index 07cdf62..74b154f 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
@@ -33,12 +33,12 @@ import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.util.JsonUtils;
 
 public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter {
   private static final String BEARER = "Bearer ";
-  
+
   private JWTokenAuthority authority = null;
 
   private ServiceRegistry sr;
@@ -56,7 +56,7 @@ public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter
     authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
     sr = (ServiceRegistry) services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
   }
-  
+
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain chain) throws IOException, ServletException {
@@ -64,15 +64,15 @@ public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter
       Subject subject = Subject.getSubject(AccessController.getContext());
       String principalName = getPrincipalName(subject);
       principalName = mapper.mapUserPrincipal(principalName);
-      JWTToken authCode;
+      JWT authCode;
       try {
         authCode = authority.issueToken(subject, "RS256");
         // get the url for the token service
-        String url = null; 
+        String url = null;
         if (sr != null) {
           url = sr.lookupServiceURL("token", "TGS");
         }
-        
+
         HashMap<String, Object> map = new HashMap<>();
         // TODO: populate map from JWT authorization code
         // Coverity CID 1327960
@@ -86,9 +86,9 @@ public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter
         if (url != null) {
           map.put("tke", url);
         }
-        
+
         String jsonResponse = JsonUtils.renderAsJsonString(map);
-        
+
         response.getWriter().write(jsonResponse);
         //KNOX-685: response.getWriter().flush();
       } catch (TokenServiceException e) {

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index d477f1f..bdde3e6 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -56,7 +56,6 @@ import org.apache.hadoop.gateway.services.security.impl.X509CertificateUtil;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
 import org.apache.hadoop.gateway.services.security.token.impl.JWT;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Assert;
@@ -550,7 +549,7 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
      */
     @Override
-    public JWTToken issueToken(Subject subject, String algorithm)
+    public JWT issueToken(Subject subject, String algorithm)
         throws TokenServiceException {
       // TODO Auto-generated method stub
       return null;
@@ -560,7 +559,7 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
      */
     @Override
-    public JWTToken issueToken(Principal p, String algorithm)
+    public JWT issueToken(Principal p, String algorithm)
         throws TokenServiceException {
       // TODO Auto-generated method stub
       return null;
@@ -570,16 +569,16 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
      */
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm)
+    public JWT issueToken(Principal p, String audience, String algorithm)
         throws TokenServiceException {
       return null;
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.hadoop.gateway.services.security.token.impl.JWTToken)
+     * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.hadoop.gateway.services.security.token.impl.JWT)
      */
     @Override
-    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+    public boolean verifyToken(JWT token) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) verifyingKey);
       return token.verify(verifier);
     }
@@ -588,13 +587,13 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String, long)
      */
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm,
+    public JWT issueToken(Principal p, String audience, String algorithm,
         long expires) throws TokenServiceException {
       return null;
     }
 
     @Override
-    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
         long expires) throws TokenServiceException {
       return null;
     }
@@ -603,14 +602,14 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long)
      */
     @Override
-    public JWT issueToken(Principal p, String audience, long l)
+    public JWT issueToken(Principal p, String algorithm, long expires)
         throws TokenServiceException {
       // TODO Auto-generated method stub
       return null;
     }
 
     @Override
-    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
index fc0a266..33b86bd 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.gateway.services.security.KeystoreService;
 import org.apache.hadoop.gateway.services.security.KeystoreServiceException;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 
 import com.nimbusds.jose.JWSSigner;
@@ -63,28 +64,28 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
    */
   @Override
-  public JWTToken issueToken(Subject subject, String algorithm) throws TokenServiceException {
+  public JWT issueToken(Subject subject, String algorithm) throws TokenServiceException {
     Principal p = (Principal) subject.getPrincipals().toArray()[0];
     return issueToken(p, algorithm);
   }
-  
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
    */
   @Override
-  public JWTToken issueToken(Principal p, String algorithm) throws TokenServiceException {
+  public JWT issueToken(Principal p, String algorithm) throws TokenServiceException {
     return issueToken(p, null, algorithm);
   }
-  
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long expires)
    */
   @Override
-  public JWTToken issueToken(Principal p, String algorithm, long expires) throws TokenServiceException {
+  public JWT issueToken(Principal p, String algorithm, long expires) throws TokenServiceException {
     return issueToken(p, (String)null, algorithm, expires);
   }
 
-  public JWTToken issueToken(Principal p, String audience, String algorithm)
+  public JWT issueToken(Principal p, String audience, String algorithm)
       throws TokenServiceException {
     return issueToken(p, audience, algorithm, -1);
   }
@@ -93,7 +94,7 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
    */
   @Override
-  public JWTToken issueToken(Principal p, String audience, String algorithm, long expires)
+  public JWT issueToken(Principal p, String audience, String algorithm, long expires)
       throws TokenServiceException {
     ArrayList<String> audiences = null;
     if (audience != null) {
@@ -104,7 +105,7 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
   }
 
   @Override
-  public JWTToken issueToken(Principal p, List<String> audiences, String algorithm, long expires)
+  public JWT issueToken(Principal p, List<String> audiences, String algorithm, long expires)
       throws TokenServiceException {
     String[] claimArray = new String[4];
     claimArray[0] = "KNOXSSO";
@@ -159,13 +160,13 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
   }
 
   @Override
-  public boolean verifyToken(JWTToken token)
+  public boolean verifyToken(JWT token)
       throws TokenServiceException {
     return verifyToken(token, null);
   }
 
   @Override
-  public boolean verifyToken(JWTToken token, RSAPublicKey publicKey)
+  public boolean verifyToken(JWT token, RSAPublicKey publicKey)
       throws TokenServiceException {
     boolean rc = false;
     PublicKey key;

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
index c953c91..4e9e76b 100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
@@ -274,32 +274,32 @@ public class WebSSOResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Subject subject, String algorithm)
+    public JWT issueToken(Subject subject, String algorithm)
       throws TokenServiceException {
       Principal p = (Principal) subject.getPrincipals().toArray()[0];
       return issueToken(p, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String algorithm)
+    public JWT issueToken(Principal p, String algorithm)
       throws TokenServiceException {
       return issueToken(p, null, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm)
+    public JWT issueToken(Principal p, String audience, String algorithm)
       throws TokenServiceException {
       return issueToken(p, audience, algorithm, -1);
     }
 
     @Override
-    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+    public boolean verifyToken(JWT token) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm,
+    public JWT issueToken(Principal p, String audience, String algorithm,
                                long expires) throws TokenServiceException {
       List<String> audiences = null;
       if (audience != null) {
@@ -310,7 +310,7 @@ public class WebSSOResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
                                long expires) throws TokenServiceException {
       String[] claimArray = new String[4];
       claimArray[0] = "KNOXSSO";
@@ -341,7 +341,7 @@ public class WebSSOResourceTest {
     }
 
     @Override
-    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index 9faa073..bddd13d 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -229,32 +229,32 @@ public class TokenServiceResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Subject subject, String algorithm)
+    public JWT issueToken(Subject subject, String algorithm)
       throws TokenServiceException {
       Principal p = (Principal) subject.getPrincipals().toArray()[0];
       return issueToken(p, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String algorithm)
+    public JWT issueToken(Principal p, String algorithm)
       throws TokenServiceException {
       return issueToken(p, null, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm)
+    public JWT issueToken(Principal p, String audience, String algorithm)
       throws TokenServiceException {
       return issueToken(p, audience, algorithm, -1);
     }
 
     @Override
-    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+    public boolean verifyToken(JWT token) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm,
+    public JWT issueToken(Principal p, String audience, String algorithm,
                                long expires) throws TokenServiceException {
       ArrayList<String> audiences = null;
       if (audience != null) {
@@ -265,7 +265,7 @@ public class TokenServiceResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
                                long expires) throws TokenServiceException {
       String[] claimArray = new String[4];
       claimArray[0] = "KNOXSSO";
@@ -296,7 +296,7 @@ public class TokenServiceResourceTest {
     }
 
     @Override
-    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
index 9cb82ec..155b239 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
@@ -24,29 +24,28 @@ import java.util.List;
 import javax.security.auth.Subject;
 
 import org.apache.hadoop.gateway.services.security.token.impl.JWT;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 
 public interface JWTokenAuthority {
 
-  JWTToken issueToken(Subject subject, String algorithm)
+  JWT issueToken(Subject subject, String algorithm)
       throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, String algorithm)
+  JWT issueToken(Principal p, String algorithm)
       throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, String audience,
+  JWT issueToken(Principal p, String audience,
       String algorithm) throws TokenServiceException;
 
-  boolean verifyToken(JWTToken token) throws TokenServiceException;
+  boolean verifyToken(JWT token) throws TokenServiceException;
 
-  boolean verifyToken(JWTToken token, RSAPublicKey publicKey)
+  boolean verifyToken(JWT token, RSAPublicKey publicKey)
       throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, String audience, String algorithm,
-      long expires) throws TokenServiceException;
+  JWT issueToken(Principal p, String algorithm, long expires) throws TokenServiceException;
 
-  JWT issueToken(Principal p, String audience, long l) throws TokenServiceException;
+  JWT issueToken(Principal p, String audience, String algorithm,
+      long expires) throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, List<String> audience, String algorithm,
+  JWT issueToken(Principal p, List<String> audience, String algorithm,
       long expires) throws TokenServiceException;
 }
\ No newline at end of file
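
With this change, callers program against the JWT interface rather than the concrete JWTToken. A minimal round-trip sketch against the refactored authority (the authority and principal are assumed to be obtained elsewhere, e.g. from the gateway's token service; the class and method names are illustrative):

    import java.security.Principal;
    import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
    import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
    import org.apache.hadoop.gateway.services.security.token.impl.JWT;

    public class TokenRoundTrip {
        static String issueAndVerify(JWTokenAuthority authority, Principal principal)
                throws TokenServiceException {
            // issueToken now returns the JWT interface, not JWTToken.
            JWT token = authority.issueToken(principal, "RS256");
            boolean valid = authority.verifyToken(token);
            // JWT.PRINCIPAL resolves to the "prn" claim per the interface above.
            return valid ? token.getClaim(JWT.PRINCIPAL) : null;
        }
    }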

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
index b834649..1a6f4f9 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
@@ -20,41 +20,44 @@ package org.apache.hadoop.gateway.services.security.token.impl;
 import java.util.Date;
 
 import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
 
 public interface JWT {
 
-  public static final String PRINCIPAL = "prn";
-  public static final String SUBJECT = "sub";
-  public static final String ISSUER = "iss";
-  public static final String AUDIENCE = "aud";
-  public static final String EXPIRES = "exp";
+  String PRINCIPAL = "prn";
+  String SUBJECT = "sub";
+  String ISSUER = "iss";
+  String AUDIENCE = "aud";
+  String EXPIRES = "exp";
 
-  public abstract String getPayload();
+  String getPayload();
 
-  public abstract void setSignaturePayload(byte[] payload);
+  void setSignaturePayload(byte[] payload);
 
-  public abstract byte[] getSignaturePayload();
+  byte[] getSignaturePayload();
 
-  public abstract String getClaim(String claimName);
+  String getClaim(String claimName);
 
-  public abstract String getPrincipal();
+  String getPrincipal();
 
-  public abstract String getIssuer();
+  String getIssuer();
 
-  public abstract String getAudience();
+  String getAudience();
 
   public String[] getAudienceClaims();
 
-  public abstract String getExpires();
+  String getExpires();
 
-  public abstract Date getExpiresDate();
+  Date getExpiresDate();
 
-  public abstract String getSubject();
+  String getSubject();
 
-  public abstract String getHeader();
+  String getHeader();
 
-  public abstract String getClaims();
+  String getClaims();
 
-  public abstract void sign(JWSSigner signer);
+  void sign(JWSSigner signer);
+
+  boolean verify(JWSVerifier verifier);
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index cc2ccfe..49d8609 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -39,10 +39,10 @@ public class JWTToken implements JWT {
   private static JWTProviderMessages log = MessagesFactory.get( JWTProviderMessages.class );
 
   SignedJWT jwt = null;
-  
+
   private JWTToken(byte[] header, byte[] claims, byte[] signature) throws ParseException {
     try {
-      jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")), 
+      jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")),
           new Base64URL(new String(signature, "UTF8")));
     } catch (UnsupportedEncodingException e) {
       log.unsupportedEncoding(e);
@@ -79,7 +79,7 @@ public class JWTToken implements JWT {
     if(claimsArray[3] != null) {
       builder = builder.expirationTime(new Date(Long.parseLong(claimsArray[3])));
     }
-    
+
     claims = builder.build();
 
     jwt = new SignedJWT(header, claims);
@@ -151,7 +151,7 @@ public class JWTToken implements JWT {
 //    System.out.println("header: " + token.header);
 //    System.out.println("claims: " + token.claims);
 //    System.out.println("payload: " + new String(token.payload));
-    
+
     return jwt;
   }
 
@@ -161,13 +161,13 @@ public class JWTToken implements JWT {
   @Override
   public String getClaim(String claimName) {
     String claim = null;
-    
+
     try {
       claim = jwt.getJWTClaimsSet().getStringClaim(claimName);
     } catch (ParseException e) {
       log.unableToParseToken(e);
     }
-    
+
     return claim;
   }
 
@@ -246,9 +246,9 @@ public class JWTToken implements JWT {
     return getClaim(JWT.PRINCIPAL);
   }
 
-  
+
   /* (non-Javadoc)
-   * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#getPrincipal()
+   * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#sign(JWSSigner)
    */
   @Override
   public void sign(JWSSigner signer) {
@@ -259,20 +259,19 @@ public class JWTToken implements JWT {
     }
   }
 
-  /**
-   * @param verifier
-   * @return
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#verify(JWSVerifier)
    */
   public boolean verify(JWSVerifier verifier) {
     boolean rc = false;
-    
+
     try {
       rc = jwt.verify(verifier);
     } catch (JOSEException e) {
       // TODO Auto-generated catch block
       log.unableToVerifyToken(e);
     }
-    
+
     return rc;
-  }  
+  }
 }


[13/22] knox git commit: KNOX-1014 - remove extraneous directory

Posted by mo...@apache.org.
KNOX-1014 - remove extraneous directory

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/a841e265
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/a841e265
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/a841e265

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: a841e2656b359e4f9bd6e819eba2b9723faec1bb
Parents: 78ef4e5
Author: Larry McCay <lm...@hortonworks.com>
Authored: Mon Sep 25 13:37:54 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Mon Sep 25 13:55:30 2017 -0400

----------------------------------------------------------------------
 b/gateway-discovery-ambari/pom.xml              |  66 --
 .../discovery/ambari/AmbariCluster.java         | 114 ---
 .../discovery/ambari/AmbariComponent.java       |  76 --
 .../ambari/AmbariServiceDiscovery.java          | 291 -------
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 --
 .../ambari/AmbariServiceDiscoveryType.java      |  35 -
 .../ambari/AmbariServiceURLCreator.java         | 184 ----
 ...eway.topology.discovery.ServiceDiscoveryType |  19 -
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 -------------------
 b/gateway-release/home/conf/descriptors/README  |   1 -
 .../home/conf/shared-providers/README           |   1 -
 11 files changed, 1724 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/pom.xml b/b/gateway-discovery-ambari/pom.xml
deleted file mode 100644
index 924e89c..0000000
--- a/b/gateway-discovery-ambari/pom.xml
+++ /dev/null
@@ -1,66 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.knox</groupId>
-        <artifactId>gateway</artifactId>
-        <version>0.14.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>gateway-discovery-ambari</artifactId>
-
-    <name>gateway-discovery-ambari</name>
-    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
-
-    <licenses>
-        <license>
-            <name>The Apache Software License, Version 2.0</name>
-            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-            <distribution>repo</distribution>
-        </license>
-    </licenses>
-
-    <dependencies>
-        <dependency>
-            <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-spi</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-test-utils</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.easymock</groupId>
-            <artifactId>easymock</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
deleted file mode 100644
index 6eaabd3..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-class AmbariCluster implements ServiceDiscovery.Cluster {
-
-    private String name = null;
-
-    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
-
-    private Map<String, AmbariComponent> components = null;
-
-
-    AmbariCluster(String name) {
-        this.name = name;
-        components = new HashMap<String, AmbariComponent>();
-    }
-
-    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
-        if (!serviceConfigurations.keySet().contains(serviceName)) {
-            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
-        }
-        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
-    }
-
-
-    void addComponent(AmbariComponent component) {
-        components.put(component.getName(), component);
-    }
-
-
-    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
-        ServiceConfiguration sc = null;
-        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
-        if (configs != null) {
-            sc = configs.get(configurationType);
-        }
-        return sc;
-    }
-
-
-    Map<String, AmbariComponent> getComponents() {
-        return components;
-    }
-
-
-    AmbariComponent getComponent(String name) {
-        return components.get(name);
-    }
-
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-
-    @Override
-    public List<String> getServiceURLs(String serviceName) {
-        List<String> urls = new ArrayList<>();
-        urls.addAll(urlCreator.create(this, serviceName));
-        return urls;
-    }
-
-
-    static class ServiceConfiguration {
-
-        private String type;
-        private String version;
-        private Map<String, String> props;
-
-        ServiceConfiguration(String type, String version, Map<String, String> properties) {
-            this.type = type;
-            this.version = version;
-            this.props = properties;
-        }
-
-        public String getVersion() {
-            return version;
-        }
-
-        public String getType() {
-            return type;
-        }
-
-        public Map<String, String> getProperties() {
-            return props;
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
deleted file mode 100644
index 55257fb..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import java.util.List;
-import java.util.Map;
-
-class AmbariComponent {
-
-    private String clusterName = null;
-    private String serviceName = null;
-    private String name        = null;
-    private String version     = null;
-
-    private List<String> hostNames = null;
-
-    private Map<String, String> properties = null;
-
-    AmbariComponent(String              name,
-                    String              version,
-                    String              cluster,
-                    String              service,
-                    List<String>        hostNames,
-                    Map<String, String> properties) {
-        this.name = name;
-        this.serviceName = service;
-        this.clusterName = cluster;
-        this.version = version;
-        this.hostNames = hostNames;
-        this.properties = properties;
-    }
-
-    public String getVersion() {
-        return version;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public String getServiceName() {
-        return serviceName;
-    }
-
-    public String getClusterName() {
-        return clusterName;
-    }
-
-    public List<String> getHostNames() {
-        return hostNames;
-    }
-
-    public Map<String, String> getConfigProperties() {
-        return properties;
-    }
-
-    public String getConfigProperty(String propertyName) {
-        return properties.get(propertyName);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
deleted file mode 100644
index 34f20a7..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONArray;
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.config.ConfigurationException;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-import org.apache.hadoop.gateway.topology.discovery.GatewayService;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.util.EntityUtils;
-
-import java.io.IOException;
-import java.util.*;
-
-
-class AmbariServiceDiscovery implements ServiceDiscovery {
-
-    static final String TYPE = "AMBARI";
-
-    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
-
-    static final String AMBARI_HOSTROLES_URI =
-                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
-
-    static final String AMBARI_SERVICECONFIGS_URI =
-            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
-
-    // Map of component names to service configuration types
-    private static Map<String, String> componentServiceConfigs = new HashMap<>();
-    static {
-        componentServiceConfigs.put("NAMENODE", "hdfs-site");
-        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
-        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
-        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
-        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
-        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
-    } // TODO: Are there other service components, for which the endpoints can be discovered via Ambari?
-
-    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
-    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
-
-    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-    @GatewayService
-    private AliasService aliasService;
-
-    private CloseableHttpClient httpClient = null;
-
-    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
-
-
-    AmbariServiceDiscovery() {
-        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
-    }
-
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-
-    @Override
-    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
-
-        String discoveryAddress = config.getAddress();
-
-        // Invoke Ambari REST API to discover the available clusters
-        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
-
-        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
-
-        // Parse the cluster names from the response, and perform the cluster discovery
-        JSONArray clusterItems = (JSONArray) json.get("items");
-        for (Object clusterItem : clusterItems) {
-            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
-            try {
-                Cluster c = discover(config, clusterName);
-                clusters.put(clusterName, c);
-            } catch (Exception e) {
-                log.clusterDiscoveryError(clusterName, e);
-            }
-        }
-
-        return clusters;
-    }
-
-
-    @Override
-    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-        AmbariCluster cluster = new AmbariCluster(clusterName);
-
-        Map<String, String> serviceComponents = new HashMap<>();
-
-        String discoveryAddress = config.getAddress();
-        String discoveryUser = config.getUser();
-        String discoveryPwdAlias = config.getPasswordAlias();
-
-        Map<String, List<String>> componentHostNames = new HashMap<>();
-        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
-        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
-        if (hostRolesJSON != null) {
-            // Process the host roles JSON
-            JSONArray items = (JSONArray) hostRolesJSON.get("items");
-            for (Object obj : items) {
-                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
-                for (Object component : components) {
-                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
-                    for (Object hostComponent : hostComponents) {
-                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
-                        String serviceName = (String) hostRoles.get("service_name");
-                        String componentName = (String) hostRoles.get("component_name");
-
-                        serviceComponents.put(componentName, serviceName);
-
-//                    String hostName = (String) hostRoles.get("host_name");
-                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
-                        log.discoveredServiceHost(serviceName, hostName);
-                        if (!componentHostNames.containsKey(componentName)) {
-                            componentHostNames.put(componentName, new ArrayList<String>());
-                        }
-                        componentHostNames.get(componentName).add(hostName);
-                    }
-                }
-            }
-        }
-
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
-                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
-        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
-        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
-        if (serviceConfigsJSON != null) {
-            // Process the service configurations
-            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
-            for (Object serviceConfig : serviceConfigs) {
-                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
-                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
-                for (Object configuration : configurations) {
-                    String configType = (String) ((JSONObject) configuration).get("type");
-                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
-
-                    Map<String, String> configProps = new HashMap<String, String>();
-                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
-                    for (String propertyName : configProperties.keySet()) {
-                        configProps.put(propertyName, String.valueOf(configProperties.get(propertyName)));
-                    }
-                    if (!serviceConfigurations.containsKey(serviceName)) {
-                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
-                    }
-                    AmbariCluster.ServiceConfiguration svcConfig =
-                            new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps);
-                    serviceConfigurations.get(serviceName).put(configType, svcConfig);
-                    cluster.addServiceConfiguration(serviceName, configType, svcConfig);
-                }
-            }
-        }
-
-        // Construct the AmbariCluster model
-        for (String componentName : serviceComponents.keySet()) {
-            String serviceName = serviceComponents.get(componentName);
-            List<String> hostNames = componentHostNames.get(componentName);
-
-            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
-            String configType = componentServiceConfigs.get(componentName);
-            if (configType != null) {
-                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
-                AmbariComponent c = new AmbariComponent(componentName,
-                                                        svcConfig.getVersion(),
-                                                        clusterName,
-                                                        serviceName,
-                                                        hostNames,
-                                                        svcConfig.getProperties());
-                cluster.addComponent(c);
-            }
-        }
-
-        return cluster;
-    }
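
The componentHostNames accumulation in this method is a plain multimap idiom: check containsKey, put an empty list, then add. On Java 8 the same step collapses to a single computeIfAbsent call, as in this sketch (names mirror the method above; hostnames are illustrative):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ComponentHostsSketch {
        public static void main(String[] args) {
            Map<String, List<String>> componentHostNames = new HashMap<>();
            // Equivalent to the containsKey/put/add sequence in discover()
            componentHostNames.computeIfAbsent("NAMENODE", k -> new ArrayList<>()).add("c6401.ambari.apache.org");
            componentHostNames.computeIfAbsent("NAMENODE", k -> new ArrayList<>()).add("c6402.ambari.apache.org");
            System.out.println(componentHostNames);
            // {NAMENODE=[c6401.ambari.apache.org, c6402.ambari.apache.org]}
        }
    }
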
-
-
-    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-        JSONObject result = null;
-
-        CloseableHttpResponse response = null;
-        try {
-            HttpGet request = new HttpGet(url);
-
-            // If no username is configured, look it up via the default username alias
-            String password = null;
-            if (username == null) {
-                if (aliasService != null) {
-                    try {
-                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
-                        if (defaultUser != null) {
-                            username = new String(defaultUser);
-                        }
-                    } catch (AliasServiceException e) {
-                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
-                    }
-                }
-
-                // If username is still null
-                if (username == null) {
-                    log.aliasServiceUserNotFound();
-                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
-                }
-            }
-
-            if (aliasService != null) {
-                // If no password alias is configured, then try the default alias
-                if (passwordAlias == null) {
-                    passwordAlias = DEFAULT_PWD_ALIAS;
-                }
-                try {
-                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
-                    if (pwd != null) {
-                        password = new String(pwd);
-                    }
-
-                } catch (AliasServiceException e) {
-                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
-                }
-            }
-
-            // If the password could not be determined
-            if (password == null) {
-                log.aliasServicePasswordNotFound();
-                throw new ConfigurationException("No password is configured for Ambari service discovery.");
-            }
-
-            // Add an auth header if credentials are available
-            String encodedCreds =
-                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
-            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
-
-            response = httpClient.execute(request);
-
-            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
-                HttpEntity entity = response.getEntity();
-                if (entity != null) {
-                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
-                    log.debugJSON(result.toJSONString());
-                } else {
-                    log.noJSON(url);
-                }
-            } else {
-                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
-            }
-
-        } catch (IOException e) {
-            log.restInvocationError(url, e);
-        } finally {
-            if(response != null) {
-                try {
-                    response.close();
-                } catch (IOException e) {
-                    // Ignore
-                }
-            }
-        }
-        return result;
-    }
-
-
-}
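
invokeREST authenticates every request with an HTTP Basic header built from the resolved credentials. A standalone sketch of that header construction, assuming commons-codec and httpclient are on the classpath; the credentials and URL are illustrative only:

    import org.apache.commons.codec.binary.Base64;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.message.BasicHeader;

    public class BasicAuthSketch {
        public static void main(String[] args) {
            String username = "admin";       // illustrative only
            String password = "admin-pwd";   // normally resolved via the AliasService
            String encodedCreds = Base64.encodeBase64String((username + ":" + password).getBytes());
            HttpGet request = new HttpGet("http://ambarihost:8080/api/v1/clusters");
            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
            System.out.println(request.getFirstHeader("Authorization").getValue());
        }
    }
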

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
deleted file mode 100644
index caa16ed..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
-public interface AmbariServiceDiscoveryMessages {
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error during cluster {0} discovery: {1}")
-    void clusterDiscoveryError(final String clusterName,
-                               @StackTrace(level = MessageLevel.ERROR) Exception e);
-
-
-    @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation {0} failed: {1}")
-    void restInvocationError(final String url,
-                             @StackTrace(level = MessageLevel.ERROR) Exception e);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
-    void aliasServiceUserError(final String alias, final String error);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
-    void aliasServicePasswordError(final String alias, final String error);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No user configured for Ambari service discovery.")
-    void aliasServiceUserNotFound();
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No password configured for Ambari service discovery.")
-    void aliasServicePasswordNotFound();
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Unexpected REST invocation response code for {0} : {1}")
-    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "REST invocation {0} yielded a response without any JSON.")
-    void noJSON(final String url);
-
-
-    @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation result: {0}")
-    void debugJSON(final String json);
-
-
-    @Message(level = MessageLevel.INFO,
-            text = "Discovered: Service: {0}, Host: {1}")
-    void discoveredServiceHost(final String serviceName, final String hostName);
-
-
-
-
-}
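
Knox @Messages interfaces like this one are never implemented by hand; callers obtain a generated proxy from MessagesFactory, which is how the discovery class above acquires its log field. A sketch, assuming this interface is on the classpath:

    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

    public class MessagesUsageSketch {
        // The proxy routes each method call to the logger named in the @Messages annotation
        private static final AmbariServiceDiscoveryMessages log =
                MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

        public static void main(String[] args) {
            log.aliasServiceUserNotFound(); // logged at ERROR with the annotated message text
        }
    }
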

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
deleted file mode 100644
index 723a786..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-
-public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
-
-    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
-
-    @Override
-    public String getType() {
-        return AmbariServiceDiscovery.TYPE;
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-        return new AmbariServiceDiscovery();
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
deleted file mode 100644
index 0674642..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-import java.util.ArrayList;
-import java.util.List;
-
-class AmbariServiceURLCreator {
-
-    private static final String NAMENODE_SERVICE        = "NAMENODE";
-    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
-    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
-    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
-    private static final String OOZIE_SERVICE           = "OOZIE";
-    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
-    private static final String HIVE_SERVICE            = "HIVE";
-    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
-
-
-    /**
-     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
-     *
-     * @param cluster The cluster discovery results
-     * @param serviceName The name of a Hadoop service
-     *
-     * @return One or more endpoint URLs for the specified service.
-     */
-    public List<String> create(AmbariCluster cluster, String serviceName) {
-        List<String> result = null;
-
-        if (NAMENODE_SERVICE.equals(serviceName)) {
-            result = createNameNodeURL(cluster);
-        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
-            result = createJobTrackerURL(cluster);
-        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
-            result = createWebHDFSURL(cluster);
-        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
-            result = createWebHCatURL(cluster);
-        } else if (OOZIE_SERVICE.equals(serviceName)) {
-            result = createOozieURL(cluster);
-        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
-            result = createWebHBaseURL(cluster);
-        } else if (HIVE_SERVICE.equals(serviceName)) {
-            result = createHiveURL(cluster);
-        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
-            result = createResourceManagerURL(cluster);
-        }
-
-        return result;
-    }
-
-
-    private List<String> createNameNodeURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("NAMENODE");
-        if (comp != null) {
-            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createJobTrackerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
-        if (comp != null) {
-            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHDFSURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
-        if (sc != null) {
-            String address = sc.getProperties().get("dfs.namenode.http-address");
-            result.add("http://" + address + "/webhdfs");
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHCatURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
-        if (webhcat != null) {
-            String port = webhcat.getConfigProperty("templeton.port");
-            String host = webhcat.getHostNames().get(0);
-
-            result.add("http://" + host + ":" + port + "/templeton");
-        }
-        return result;
-    }
-
-
-    private List<String> createOozieURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
-        if (comp != null) {
-            result.add(comp.getConfigProperty("oozie.base.url"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHBaseURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
-        if (comp != null) {
-            for (String host : comp.getHostNames()) {
-                result.add("http://" + host + ":60080");
-            }
-        }
-
-        return result;
-    }
-
-
-    private List<String> createHiveURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
-        if (hive != null) {
-            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
-            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
-            String transport = hive.getConfigProperty("hive.server2.transport.mode");
-            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
-            String host = hive.getHostNames().get(0);
-
-            // Only the http transport mode maps to a URL scheme; the binary
-            // transport mode has no HTTP endpoint, so no URL is produced for it.
-            if ("http".equals(transport)) {
-                String scheme = Boolean.valueOf(useSSL) ? "https" : "http";
-                result.add(scheme + "://" + host + ":" + port + "/" + path);
-            }
-        }
-        return result;
-    }
-
-
-    private List<String> createResourceManagerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
-        if (resMan != null) {
-            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
-            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
-            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
-
-            result.add(scheme + "://" + webappAddress + "/ws");
-        }
-
-        return result;
-    }
-
-
-}
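
A usage sketch for the creator above; since the class is package-private, this assumes code in the same package, and since the cluster here carries no discovered configuration, the expected result is an empty list (normally the AmbariCluster comes from AmbariServiceDiscovery):

    import java.util.List;

    public class UrlCreatorUsageSketch {
        public static void main(String[] args) {
            AmbariCluster cluster = new AmbariCluster("testCluster"); // empty; no discovery performed
            List<String> urls = new AmbariServiceURLCreator().create(cluster, "WEBHDFS");
            System.out.println(urls); // [] until HDFS configuration is added to the cluster
        }
    }
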

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
deleted file mode 100644
index 1da4fc9..0000000
--- a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
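
This descriptor is a standard java.util.ServiceLoader registration: the file is named for the SPI interface and lists the implementation class. A sketch of how the registered types can be enumerated (the gateway's actual lookup code is outside this diff):

    import java.util.ServiceLoader;

    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    public class DiscoveryTypeLookupSketch {
        public static void main(String[] args) {
            for (ServiceDiscoveryType type : ServiceLoader.load(ServiceDiscoveryType.class)) {
                // For this module, prints the Ambari discovery type name
                System.out.println(type.getType());
            }
        }
    }
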


[21/22] knox git commit: KNOX-998 - Merging from current master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
deleted file mode 100644
index 1e5e7b2..0000000
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ /dev/null
@@ -1,856 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.*;
-
-
-/**
- * Test the Ambari ServiceDiscovery implementation.
- *
- * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
- *      treatment of the responses as they were observed at the time the tests were developed.
- */
-public class AmbariServiceDiscoveryTest {
-
-    @Test
-    public void testSingleClusterDiscovery() throws Exception {
-        final String discoveryAddress = "http://ambarihost:8080";
-        final String clusterName = "testCluster";
-        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
-
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
-        EasyMock.replay(sdc);
-
-        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
-        assertNotNull(cluster);
-        assertEquals(clusterName, cluster.getName());
-        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
-        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
-
-//        printServiceURLs(cluster);
-    }
-
-
-    @Test
-    public void testBulkClusterDiscovery() throws Exception {
-        final String discoveryAddress = "http://ambarihost:8080";
-        final String clusterName = "anotherCluster";
-        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
-
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
-        EasyMock.replay(sdc);
-
-        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
-        assertNotNull(clusters);
-        assertEquals(1, clusters.size());
-        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
-        assertNotNull(cluster);
-        assertEquals(clusterName, cluster.getName());
-        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
-        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
-
-//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
-    }
-
-
-    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
-        final String[] services = new String[]{"NAMENODE",
-                                               "JOBTRACKER",
-                                               "WEBHDFS",
-                                               "WEBHCAT",
-                                               "OOZIE",
-                                               "WEBHBASE",
-                                               "HIVE",
-                                               "RESOURCEMANAGER"};
-        printServiceURLs(cluster, services);
-    }
-
-
-    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
-        for (String name : services) {
-            StringBuilder sb = new StringBuilder();
-            List<String> urls = cluster.getServiceURLs(name);
-            if (urls != null && !urls.isEmpty()) {
-                for (String url : urls) {
-                    sb.append(url);
-                    sb.append(" ");
-                }
-            }
-            System.out.println(String.format("%18s: %s", name, sb.toString()));
-        }
-    }
-
-
-    /**
-     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
-     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
-     */
-    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
-
-        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
-
-        private Map<String, JSONObject> cannedResponses = new HashMap<>();
-
-        TestAmbariServiceDiscovery(String clusterName) {
-            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
-                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                               clusterName)));
-
-            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
-                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                clusterName)));
-
-            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
-                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                     clusterName)));
-        }
-
-        @Override
-        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-            return cannedResponses.get(url.substring(url.indexOf("/api")));
-        }
-    }
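
The canned responses are keyed by URI path, so the overridden invokeREST strips the scheme/host prefix with substring(url.indexOf("/api")) before the map lookup. A sketch of that normalization with an illustrative URL:

    public class UriKeySketch {
        public static void main(String[] args) {
            String url = "http://ambarihost:8080/api/v1/clusters";
            System.out.println(url.substring(url.indexOf("/api"))); // /api/v1/clusters
        }
    }
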
-
-
-    ////////////////////////////////////////////////////////////////////////
-    //  JSON response templates, based on actual response content excerpts
-    ////////////////////////////////////////////////////////////////////////
-
-    private static final String CLUSTERS_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"Clusters\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"version\" : \"HDP-2.6\"\n" +
-    "      }\n" +
-    "    }\n" +
-    "  ]" +
-    "}";
-
-
-    private static final String HOSTROLES_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
-    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HBASE_MASTER\",\n" +
-    "            \"service_name\" : \"HBASE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HBASE_MASTER\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HBASE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"HDFS\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"NAMENODE\",\n" +
-    "            \"service_name\" : \"HDFS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"NAMENODE\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HDFS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
-    "            \"service_name\" : \"HDFS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HDFS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"HIVE\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HCAT\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HCAT\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HIVE_SERVER\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HIVE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"OOZIE\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
-    "            \"service_name\" : \"OOZIE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"OOZIE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"YARN\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"NODEMANAGER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"NODEMANAGER\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
-    "                \"ha_state\" : \"ACTIVE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"ZOOKEEPER\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "            \"service_name\" : \"ZOOKEEPER\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            },\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            },\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    }\n" +
-    "  ]\n" +
-    "}\n";
-
-
-    private static final String SERVICECONFIGS_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hbase-site\",\n" +
-    "          \"tag\" : \"version1503410563715\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
-    "            \"hbase.master.info.port\" : \"16010\",\n" +
-    "            \"hbase.master.port\" : \"16000\",\n" +
-    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
-    "            \"hbase.regionserver.port\" : \"16020\",\n" +
-    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
-    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
-    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
-    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
-    "      \"service_name\" : \"HBASE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hdfs-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
-    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
-    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
-    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
-    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
-    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
-    "            \"dfs.https.port\" : \"50470\",\n" +
-    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
-    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
-    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
-    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
-    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
-    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
-    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"final\" : {\n" +
-    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
-    "              \"dfs.namenode.http-address\" : \"true\",\n" +
-    "              \"dfs.support.append\" : \"true\",\n" +
-    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
-    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
-    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"core-site\",\n" +
-    "          \"tag\" : \"version1502131215159\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
-    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"final\" : {\n" +
-    "              \"fs.defaultFS\" : \"true\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 2,\n" +
-    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
-    "      \"service_name\" : \"HDFS\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-env\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive_security_authorization\" : \"None\",\n" +
-    "            \"webhcat_user\" : \"hcat\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hiveserver2-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
-    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
-    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
-    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-interactive-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
-    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
-    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
-    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
-    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
-    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
-    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
-    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
-    "            \"hive.server2.webui.port\" : \"10502\",\n" +
-    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
-    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"tez-interactive-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
-    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
-    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-site\",\n" +
-    "          \"tag\" : \"version1502130841736\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
-    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
-    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
-    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
-    "            \"hive.server2.authentication\" : \"NONE\",\n" +
-    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
-    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
-    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
-    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
-    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
-    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
-    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
-    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
-    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
-    "            \"hive.server2.transport.mode\" : \"http\",\n" +
-    "            \"hive.server2.use.SSL\" : \"false\",\n" +
-    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"hidden\" : {\n" +
-    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"webhcat-site\",\n" +
-    "          \"tag\" : \"version1502131111746\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"templeton.port\" : \"50111\",\n" +
-    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
-    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
-    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
-    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
-    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"createtime\" : 1502131110745,\n" +
-    "      \"group_id\" : -1,\n" +
-    "      \"group_name\" : \"Default\",\n" +
-    "      \"hosts\" : [ ],\n" +
-    "      \"is_cluster_compatible\" : true,\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 3,\n" +
-    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
-    "      \"service_name\" : \"HIVE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"oozie-site\",\n" +
-    "          \"tag\" : \"version1502131137103\",\n" +
-    "          \"version\" : 3,\n" +
-    "          \"properties\" : {\n" +
-    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 3,\n" +
-    "      \"service_name\" : \"OOZIE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"tez-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"createtime\" : 1502122253525,\n" +
-    "      \"group_id\" : -1,\n" +
-    "      \"group_name\" : \"Default\",\n" +
-    "      \"hosts\" : [ ],\n" +
-    "      \"is_cluster_compatible\" : true,\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
-    "      \"service_name\" : \"TEZ\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"yarn-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
-    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
-    "            \"yarn.acl.enable\" : \"false\",\n" +
-    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
-    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
-    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
-    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
-    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
-    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
-    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
-    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
-    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
-    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
-    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
-    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
-    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_name\" : \"YARN\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    }\n" +
-    "  ]\n" +
-    "}";
-
-}


[08/22] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..1e5e7b2
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
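+ *
+ *      Orientation (names taken from the test code below): each test exercises the real
+ *      discovery/parsing logic against canned JSON, e.g.
+ *
+ *          ServiceDiscovery sd = new TestAmbariServiceDiscovery("testCluster");
+ *          ServiceDiscovery.Cluster cluster = sd.discover(sdc, "testCluster");
+ *
+ *      with no HTTP interaction; the responses come from the JSON templates at the
+ *      bottom of this file.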
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
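+        // Targeted discovery: a specific cluster name is supplied, so a single Cluster is returned.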
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
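+        // Bulk discovery: no cluster name is supplied, so every cluster reported by the
+        // (canned) Ambari endpoint is returned -- exactly one here.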
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
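+            // Render each JSON template for this cluster name and key it by the relative
+            // Ambari API path that the discovery code will request.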
+            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
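+            // Key on the path portion only (from "/api" onward), matching the relative URI keys
+            // populated in the constructor.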
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/descriptors/README b/gateway-release/home/conf/descriptors/README
new file mode 100644
index 0000000..a2e5226
--- /dev/null
+++ b/gateway-release/home/conf/descriptors/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/shared-providers/README b/gateway-release/home/conf/shared-providers/README
new file mode 100644
index 0000000..44d12a3
--- /dev/null
+++ b/gateway-release/home/conf/shared-providers/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 83824cd..cbff307 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -334,6 +334,10 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-shell-samples</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-discovery-ambari</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 437d22d..5ebf793 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -248,6 +248,11 @@
            <artifactId>javax-websocket-client-impl</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>com.fasterxml.jackson.dataformat</groupId>
+            <artifactId>jackson-dataformat-yaml</artifactId>
+            <version>2.3.0</version>
+        </dependency>
 
         <!-- ********** ********** ********** ********** ********** ********** -->
         <!-- ********** Test Dependencies                           ********** -->

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 1f94584..6f73c1e 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -511,6 +511,11 @@ public interface GatewayMessages {
                " Could not find topology \"{0}\" mapped to port \"{1}\" configured in gateway-config.xml. "
                    + "This invalid topology mapping will be ignored by the gateway. "
                    + "Gateway restart will be required if in the future \"{0}\" topology is added.")
-  void topologyPortMappingCannotFindTopology(final String topology,
-      final int port);
+  void topologyPortMappingCannotFindTopology(final String topology, final int port);
+
+
+  @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
+  void simpleDescriptorHandlingError(final String simpleDesc,
+                                     @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
index cefada1..02ac154 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
@@ -105,14 +105,13 @@ public class DefaultGatewayServices implements GatewayServices {
     services.put( SERVER_INFO_SERVICE, sis );
 
     DefaultTopologyService tops = new DefaultTopologyService();
+    tops.setAliasService(alias);
     tops.init(  config, options  );
     services.put(  TOPOLOGY_SERVICE, tops  );
 
     DefaultServiceDefinitionRegistry sdr = new DefaultServiceDefinitionRegistry();
     sdr.init( config, options );
     services.put( SERVICE_DEFINITION_REGISTRY, sdr );
-    tops.init( config, options );
-    services.put( TOPOLOGY_SERVICE, tops );
 
     DefaultMetricsService metricsService = new DefaultMetricsService();
     metricsService.init( config, options );

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index 3321f3d..a493bc4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.gateway.config.GatewayConfig;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.service.definition.ServiceDefinition;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.TopologyService;
 import org.apache.hadoop.gateway.topology.Topology;
 import org.apache.hadoop.gateway.topology.TopologyEvent;
@@ -45,6 +46,7 @@ import org.apache.hadoop.gateway.topology.TopologyListener;
 import org.apache.hadoop.gateway.topology.TopologyMonitor;
 import org.apache.hadoop.gateway.topology.TopologyProvider;
 import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
+import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.gateway.topology.xml.AmbariFormatXmlTopologyRules;
 import org.apache.hadoop.gateway.topology.xml.KnoxFormatXmlTopologyRules;
@@ -74,20 +76,27 @@ import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
 public class DefaultTopologyService
     extends FileAlterationListenerAdaptor
     implements TopologyService, TopologyMonitor, TopologyProvider, FileFilter, FileAlterationListener {
+
   private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(
     AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
     AuditConstants.KNOX_COMPONENT_NAME);
+
   private static final List<String> SUPPORTED_TOPOLOGY_FILE_EXTENSIONS = new ArrayList<String>();
   static {
     SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("xml");
     SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("conf");
   }
+
   private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
   private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
-  private FileAlterationMonitor monitor;
-  private File directory;
+  private List<FileAlterationMonitor> monitors = new ArrayList<>();
+  private File topologiesDirectory;
+  private File descriptorsDirectory;
+
   private Set<TopologyListener> listeners;
   private volatile Map<File, Topology> topologies;
+  private AliasService aliasService;
+
 
   private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
     final long TIMEOUT = 250; //ms
@@ -202,29 +211,40 @@ public class DefaultTopologyService
   }
 
   private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
-
-    File topoDir = new File(config.getGatewayTopologyDir());
+    String normalizedTopologyDir = FilenameUtils.normalize(config.getGatewayTopologyDir());
+    File topoDir = new File(normalizedTopologyDir);
     topoDir = topoDir.getAbsoluteFile();
     return topoDir;
   }
 
-  private void initListener(FileAlterationMonitor monitor, File directory) {
-    this.directory = directory;
-    this.monitor = monitor;
+  private File calculateAbsoluteConfigDir(GatewayConfig config) {
+    File configDir = null;
 
+    String path = FilenameUtils.normalize(config.getGatewayConfDir());
+    if (path != null) {
+      configDir = new File(path);
+    } else {
+      configDir = (new File(config.getGatewayTopologyDir())).getParentFile();
+    }
+    configDir = configDir.getAbsoluteFile();
 
-    FileAlterationObserver observer = new FileAlterationObserver(this.directory, this);
-    observer.addListener(this);
-    monitor.addObserver(observer);
+    return configDir;
+  }
 
-    this.listeners = new HashSet<>();
-    this.topologies = new HashMap<>(); //loadTopologies( this.directory );
+  private void  initListener(FileAlterationMonitor  monitor,
+                            File                   directory,
+                            FileFilter             filter,
+                            FileAlterationListener listener) {
+    monitors.add(monitor);
+    FileAlterationObserver observer = new FileAlterationObserver(directory, filter);
+    observer.addListener(listener);
+    monitor.addObserver(observer);
   }
 
-  private void initListener(File directory) throws IOException, SAXException {
+  private void initListener(File directory, FileFilter filter, FileAlterationListener listener) throws IOException, SAXException {
     // Increasing the monitoring interval to 5 seconds as profiling has shown
     // this is rather expensive in terms of generated garbage objects.
-    initListener(new FileAlterationMonitor(5000L), directory);
+    initListener(new FileAlterationMonitor(5000L), directory, filter, listener);
   }
 
   private Map<File, Topology> loadTopologies(File directory) {
@@ -261,10 +281,14 @@ public class DefaultTopologyService
     return map;
   }
 
+  public void setAliasService(AliasService as) {
+    this.aliasService = as;
+  }
+
   public void deployTopology(Topology t){
 
     try {
-      File temp = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
+      File temp = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
       Package topologyPkg = Topology.class.getPackage();
       String pkgName = topologyPkg.getName();
       String bindingFile = pkgName.replace(".", "/") + "/topology_binding-xml.xml";
@@ -277,7 +301,7 @@ public class DefaultTopologyService
       mr.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
       mr.marshal(t, temp);
 
-      File topology = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml");
+      File topology = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml");
       if(!temp.renameTo(topology)) {
         FileUtils.forceDelete(temp);
         throw new IOException("Could not rename temp file");
@@ -317,7 +341,7 @@ public class DefaultTopologyService
     try {
       synchronized (this) {
         Map<File, Topology> oldTopologies = topologies;
-        Map<File, Topology> newTopologies = loadTopologies(directory);
+        Map<File, Topology> newTopologies = loadTopologies(topologiesDirectory);
         List<TopologyEvent> events = createChangeEvents(oldTopologies, newTopologies);
         topologies = newTopologies;
         notifyChangeListeners(events);
@@ -329,7 +353,7 @@ public class DefaultTopologyService
   }
 
   public void deleteTopology(Topology t) {
-    File topoDir = directory;
+    File topoDir = topologiesDirectory;
 
     if(topoDir.isDirectory() && topoDir.canRead()) {
       File[] results = topoDir.listFiles();
@@ -357,8 +381,8 @@ public class DefaultTopologyService
   public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
     File tFile = null;
     Map<String, List<String>> urls = new HashMap<>();
-    if(directory.isDirectory() && directory.canRead()) {
-      for(File f : directory.listFiles()){
+    if(topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
+      for(File f : topologiesDirectory.listFiles()){
         if(FilenameUtils.removeExtension(f.getName()).equals(t.getName())){
           tFile = f;
         }
@@ -387,12 +411,16 @@ public class DefaultTopologyService
 
   @Override
   public void startMonitor() throws Exception {
-    monitor.start();
+    for (FileAlterationMonitor monitor : monitors) {
+      monitor.start();
+    }
   }
 
   @Override
   public void stopMonitor() throws Exception {
-    monitor.stop();
+    for (FileAlterationMonitor monitor : monitors) {
+      monitor.stop();
+    }
   }
 
   @Override
@@ -414,6 +442,16 @@ public class DefaultTopologyService
 
   @Override
   public void onFileDelete(java.io.File file) {
+    // For full topology descriptors, we need to make sure to delete any corresponding simple descriptors to prevent
+    // unintended subsequent generation of the topology descriptor
+    for (String ext : DescriptorsMonitor.SUPPORTED_EXTENSIONS) {
+      File simpleDesc =
+              new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+      if (simpleDesc.exists()) {
+        simpleDesc.delete();
+      }
+    }
+
     onFileChange(file);
   }
 
@@ -436,12 +474,200 @@ public class DefaultTopologyService
   public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
 
     try {
-      initListener(calculateAbsoluteTopologiesDir(config));
-    } catch (IOException io) {
+      listeners = new HashSet<>();
+      topologies = new HashMap<>();
+
+      topologiesDirectory = calculateAbsoluteTopologiesDir(config);
+
+      File configDirectory = calculateAbsoluteConfigDir(config);
+      descriptorsDirectory = new File(configDirectory, "descriptors");
+      File sharedProvidersDirectory = new File(configDirectory, "shared-providers");
+
+      // Add support for conf/topologies
+      initListener(topologiesDirectory, this, this);
+
+      // Add support for conf/descriptors
+      DescriptorsMonitor dm = new DescriptorsMonitor(topologiesDirectory, aliasService);
+      initListener(descriptorsDirectory,
+                   dm,
+                   dm);
+
+      // Add support for conf/shared-providers
+      SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
+      initListener(sharedProvidersDirectory, spm, spm);
+
+    } catch (IOException | SAXException io) {
       throw new ServiceLifecycleException(io.getMessage());
-    } catch (SAXException sax) {
-      throw new ServiceLifecycleException(sax.getMessage());
+    }
+  }
+
+
+  /**
+   * Change handler for simple descriptors
+   */
+  public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
+                                          implements FileFilter {
+
+    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<String>();
+    static {
+      SUPPORTED_EXTENSIONS.add("json");
+      SUPPORTED_EXTENSIONS.add("yml");
+    }
+
+    private File topologiesDir;
+
+    private AliasService aliasService;
+
+    private Map<String, List<String>> providerConfigReferences = new HashMap<>();
+
+
+    public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
+      this.topologiesDir  = topologiesDir;
+      this.aliasService   = aliasService;
+    }
+
+    List<String> getReferencingDescriptors(String providerConfigPath) {
+      List<String> result = providerConfigReferences.get(providerConfigPath);
+      if (result == null) {
+        result = Collections.emptyList();
+      }
+      return result;
+    }
+
+    @Override
+    public void onFileCreate(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileDelete(File file) {
+      // For simple descriptors, we need to make sure to delete any corresponding full topology descriptors to trigger undeployment
+      for (String ext : DefaultTopologyService.SUPPORTED_TOPOLOGY_FILE_EXTENSIONS) {
+        File topologyFile =
+                new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+        if (topologyFile.exists()) {
+          topologyFile.delete();
+        }
+      }
+
+      String normalizedFilePath = FilenameUtils.normalize(file.getAbsolutePath());
+      String reference = null;
+      for (Map.Entry<String, List<String>> entry : providerConfigReferences.entrySet()) {
+        if (entry.getValue().contains(normalizedFilePath)) {
+          reference = entry.getKey();
+          break;
+        }
+      }
+      if (reference != null) {
+        providerConfigReferences.get(reference).remove(normalizedFilePath);
+      }
+    }
+
+    @Override
+    public void onFileChange(File file) {
+      try {
+        // When a simple descriptor has been created or modified, generate the new topology descriptor
+        Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
+
+        // Add the provider config reference relationship for handling updates to the provider config
+        String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
+        if (!providerConfigReferences.containsKey(providerConfig)) {
+          providerConfigReferences.put(providerConfig, new ArrayList<String>());
+        }
+        List<String> refs = providerConfigReferences.get(providerConfig);
+        String descriptorName = FilenameUtils.normalize(file.getAbsolutePath());
+        if (!refs.contains(descriptorName)) {
+          // Need to check if descriptor had previously referenced another provider config, so it can be removed
+          for (List<String> descs : providerConfigReferences.values()) {
+            if (descs.contains(descriptorName)) {
+              descs.remove(descriptorName);
+            }
+          }
+
+          // Add the current reference relationship
+          refs.add(descriptorName);
+        }
+      } catch (Exception e) {
+        log.simpleDescriptorHandlingError(file.getName(), e);
+      }
+    }
+
+    @Override
+    public boolean accept(File file) {
+      boolean accept = false;
+      if (!file.isDirectory() && file.canRead()) {
+        String extension = FilenameUtils.getExtension(file.getName());
+        if (SUPPORTED_EXTENSIONS.contains(extension)) {
+          accept = true;
+        }
+      }
+      return accept;
+    }
+  }
+
+  /**
+   * Change handler for shared provider configurations
+   */
+  public static class SharedProviderConfigMonitor extends FileAlterationListenerAdaptor
+          implements FileFilter {
+
+    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
+    static {
+      SUPPORTED_EXTENSIONS.add("xml");
     }
 
+    private DescriptorsMonitor descriptorsMonitor;
+    private File descriptorsDir;
+
+
+    SharedProviderConfigMonitor(DescriptorsMonitor descMonitor, File descriptorsDir) {
+      this.descriptorsMonitor = descMonitor;
+      this.descriptorsDir     = descriptorsDir;
+    }
+
+    @Override
+    public void onFileCreate(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileDelete(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileChange(File file) {
+      // For shared provider configuration, we need to update any simple descriptors that reference it
+      for (File descriptor : getReferencingDescriptors(file)) {
+        descriptor.setLastModified(System.currentTimeMillis());
+      }
+    }
+
+    private List<File> getReferencingDescriptors(File sharedProviderConfig) {
+      List<File> references = new ArrayList<>();
+
+      for (File descriptor : descriptorsDir.listFiles()) {
+        if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
+          for (String reference : descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()))) {
+            references.add(new File(reference));
+          }
+        }
+      }
+
+      return references;
+    }
+
+    @Override
+    public boolean accept(File file) {
+      boolean accept = false;
+      if (!file.isDirectory() && file.canRead()) {
+        String extension = FilenameUtils.getExtension(file.getName());
+        if (SUPPORTED_EXTENSIONS.contains(extension)) {
+          accept = true;
+        }
+      }
+      return accept;
+    }
   }
+
 }
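
For context, with this change DefaultTopologyService watches three directories instead of one. A
minimal sketch of the resulting layout under the gateway conf directory (the directory names come
from the diff above; the individual file names are hypothetical):

    conf/
      topologies/           full topology XML files (existing behavior)
        sandbox.xml
      descriptors/          simple descriptors (*.json, *.yml), watched by DescriptorsMonitor
        sandbox.json
      shared-providers/     shared provider configurations (*.xml), watched by SharedProviderConfigMonitor
        default-providers.xml

Per the handlers above: deleting topologies/sandbox.xml also removes descriptors/sandbox.json so the
topology is not regenerated; deleting descriptors/sandbox.json removes topologies/sandbox.xml to
trigger undeployment; and touching a shared provider configuration bumps the last-modified time of
every descriptor that references it, which re-runs topology generation.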

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index b5e80d2..a30cf13 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@ -77,7 +77,7 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
         Topology topology = new Topology();
         topology.setName(name);
 
-          for (Provider provider : providers) {
+        for (Provider provider : providers) {
             topology.addProvider(provider);
         }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
new file mode 100644
index 0000000..6534b5e
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+public class DefaultServiceDiscoveryConfig implements ServiceDiscoveryConfig {
+    private String address  = null;
+    private String user     = null;
+    private String pwdAlias = null;
+
+    public DefaultServiceDiscoveryConfig(String address) {
+        this.address = address;
+    }
+
+    public void setUser(String username) {
+        this.user = username;
+    }
+
+    public void setPasswordAlias(String alias) {
+        this.pwdAlias = alias;
+    }
+
+    public String getAddress() {
+        return address;
+    }
+
+    public String getUser() {
+        return user;
+    }
+
+    public String getPasswordAlias() {
+        return pwdAlias;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
new file mode 100644
index 0000000..70d5f61
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.Service;
+
+import java.lang.reflect.Field;
+import java.util.ServiceLoader;
+
+/**
+ * Creates instances of ServiceDiscovery implementations.
+ *
+ * This factory uses the ServiceLoader mechanism to load ServiceDiscovery implementations as extensions.
+ *
+ */
+public abstract class ServiceDiscoveryFactory {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+
+    public static ServiceDiscovery get(String type) {
+        return get(type, NO_GATEWAY_SERVICES);
+    }
+
+
+    public static ServiceDiscovery get(String type, Service...gatewayServices) {
+        ServiceDiscovery sd  = null;
+
+        // Look up the available ServiceDiscovery types
+        ServiceLoader<ServiceDiscoveryType> loader = ServiceLoader.load(ServiceDiscoveryType.class);
+        for (ServiceDiscoveryType sdt : loader) {
+            if (sdt.getType().equalsIgnoreCase(type)) {
+                try {
+                    ServiceDiscovery instance = sdt.newInstance();
+                    // Make sure the type reported by the instance matches the type declared
+                    // by the factory (a defensive consistency check against misregistered types)
+                    if (instance.getType().equalsIgnoreCase(type)) {
+                        sd = instance;
+
+                        // Inject any gateway services that were specified, and which are referenced in the impl
+                        if (gatewayServices != null && gatewayServices.length > 0) {
+                            for (Field field : sd.getClass().getDeclaredFields()) {
+                                if (field.getDeclaredAnnotation(GatewayService.class) != null) {
+                                    for (Service s : gatewayServices) {
+                                        if (s != null) {
+                                            if (field.getType().isAssignableFrom(s.getClass())) {
+                                                field.setAccessible(true);
+                                                field.set(sd, s);
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        break;
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return sd;
+    }
+
+
+}
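
A brief usage sketch of the factory above. The "AMBARI" type string, the address, and the credential
values are illustrative assumptions (the Ambari implementation lives elsewhere in this patch series);
the factory and config APIs themselves are taken from these diffs:

    import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;

    public class DiscoveryExample {
        public static void main(String[] args) {
            // Looks up registered ServiceDiscoveryType implementations via ServiceLoader;
            // returns null if no implementation reports the requested type
            ServiceDiscovery discovery = ServiceDiscoveryFactory.get("AMBARI");
            if (discovery == null) {
                System.out.println("No ServiceDiscovery registered for type AMBARI");
                return;
            }
            // Point discovery at a (hypothetical) management endpoint
            DefaultServiceDiscoveryConfig config =
                    new DefaultServiceDiscoveryConfig("http://ambarihost:8080");
            config.setUser("discovery-user");                      // optional
            config.setPasswordAlias("ambari.discovery.password");  // optional
            ServiceDiscovery.Cluster cluster = discovery.discover(config, "testCluster");
            if (cluster != null) {
                System.out.println("Discovered cluster: " + cluster.getName());
            }
        }
    }

Note that gateway services can also be passed to the factory (e.g. get("AMBARI", aliasService)); any
field on the implementation annotated with @GatewayService and assignable from a supplied service is
injected reflectively.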

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
new file mode 100644
index 0000000..aa28469
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import java.util.List;
+
+public interface SimpleDescriptor {
+
+    String getName();
+
+    String getDiscoveryType();
+
+    String getDiscoveryAddress();
+
+    String getDiscoveryUser();
+
+    String getDiscoveryPasswordAlias();
+
+    String getClusterName();
+
+    String getProviderConfig();
+
+    List<Service> getServices();
+
+
+    interface Service {
+        String getName();
+
+        List<String> getURLs();
+    }
+
+}
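
To make the shape of these descriptors concrete, here is a hypothetical JSON instance matching the
accessors above. The property names are assumptions for illustration only; the actual Jackson
bindings live in SimpleDescriptorImpl, which is not part of this excerpt:

    {
      "discovery-type"      : "AMBARI",
      "discovery-address"   : "http://ambarihost:8080",
      "discovery-user"      : "discovery-user",
      "discovery-pwd-alias" : "ambari.discovery.password",
      "cluster"             : "testCluster",
      "provider-config-ref" : "default-providers",
      "services" : [
        { "name" : "NAMENODE" },
        { "name" : "WEBHDFS" },
        { "name" : "OOZIE", "urls" : [ "http://c6402.ambari.apache.org:11000/oozie" ] }
      ]
    }

A service entry may carry explicit URLs; entries without them are presumably resolved against the
named cluster via the configured discovery source.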

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
new file mode 100644
index 0000000..3df6d2f
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+
+public class SimpleDescriptorFactory {
+
+    /**
+     * Create a SimpleDescriptor from the specified file.
+     *
+     * @param path The path to the file.
+     * @return A SimpleDescriptor based on the contents of the file.
+     *
+     * @throws IOException if an error occurs while reading the file.
+     */
+    public static SimpleDescriptor parse(String path) throws IOException {
+        SimpleDescriptor sd;
+
+        if (path.endsWith(".json")) {
+            sd = parseJSON(path);
+        } else if (path.endsWith(".yml")) {
+            sd = parseYAML(path);
+        } else {
+            throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
+        }
+
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseJSON(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper();
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseYAML(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+}
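
A minimal sketch of driving the factory above, assuming a descriptor file like the JSON sample shown
earlier (the path is hypothetical; only the .json and .yml extensions are accepted):

    import java.io.IOException;
    import org.apache.hadoop.gateway.topology.simple.SimpleDescriptor;
    import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorFactory;

    public class ParseExample {
        public static void main(String[] args) throws IOException {
            SimpleDescriptor sd = SimpleDescriptorFactory.parse("conf/descriptors/sandbox.json");
            System.out.println("name    : " + sd.getName());  // base name of the file, per parseJSON
            System.out.println("cluster : " + sd.getClusterName());
            for (SimpleDescriptor.Service service : sd.getServices()) {
                System.out.println("service : " + service.getName() + " -> " + service.getURLs());
            }
        }
    }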


[11/22] knox git commit: KNOX-1065 - Add some unit tests for the DefaultTokenAuthorityService

Posted by mo...@apache.org.
KNOX-1065 - Add some unit tests for the DefaultTokenAuthorityService


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/78ef4e50
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/78ef4e50
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/78ef4e50

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 78ef4e50ba6f7522aa363d5d1df8f8d04320003e
Parents: c2ca443
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Mon Sep 25 11:58:24 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Sep 25 11:58:24 2017 +0100

----------------------------------------------------------------------
 .../impl/DefaultTokenAuthorityServiceTest.java  | 160 +++++++++++++++++++
 .../resources/keystores/server-keystore.jks     | Bin 0 -> 1387 bytes
 2 files changed, 160 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/78ef4e50/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
new file mode 100644
index 0000000..7cc9971
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.services.token.impl;
+
+import java.io.File;
+import java.security.Principal;
+import java.util.HashMap;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.KeystoreService;
+import org.apache.hadoop.gateway.services.security.MasterService;
+import org.apache.hadoop.gateway.services.security.impl.DefaultKeystoreService;
+import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+/**
+ * Some unit tests for the DefaultTokenAuthorityService.
+ */
+public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
+
+  @Test
+  public void testTokenCreation() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, "RS256");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+  }
+
+  @Test
+  public void testTokenCreationAudience() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, "https://login.example.com", "RS256");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+  }
+
+  @Test
+  public void testTokenCreationNullAudience() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, null, "RS256");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/78ef4e50/gateway-server/src/test/resources/keystores/server-keystore.jks
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/keystores/server-keystore.jks b/gateway-server/src/test/resources/keystores/server-keystore.jks
new file mode 100644
index 0000000..570c92c
Binary files /dev/null and b/gateway-server/src/test/resources/keystores/server-keystore.jks differ


[12/22] knox git commit: KNOX-1014 - remove extraneous directory

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
deleted file mode 100644
index 1e5e7b2..0000000
--- a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ /dev/null
@@ -1,856 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.*;
-
-
-/**
- * Test the Ambari ServiceDiscovery implementation.
- *
- * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
- *      treatment of the responses as they were observed at the time the tests are developed.
- */
-public class AmbariServiceDiscoveryTest {
-
-    @Test
-    public void testSingleClusterDiscovery() throws Exception {
-        final String discoveryAddress = "http://ambarihost:8080";
-        final String clusterName = "testCluster";
-        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
-
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
-        EasyMock.replay(sdc);
-
-        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
-        assertNotNull(cluster);
-        assertEquals(clusterName, cluster.getName());
-        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
-        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
-
-//        printServiceURLs(cluster);
-    }
-
-
-    @Test
-    public void testBulkClusterDiscovery() throws Exception {
-        final String discoveryAddress = "http://ambarihost:8080";
-        final String clusterName = "anotherCluster";
-        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
-
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
-        EasyMock.replay(sdc);
-
-        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
-        assertNotNull(clusters);
-        assertEquals(1, clusters.size());
-        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
-        assertNotNull(cluster);
-        assertEquals(clusterName, cluster.getName());
-        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
-        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
-
-//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
-    }
-
-
-    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
-        final String[] services = new String[]{"NAMENODE",
-                                               "JOBTRACKER",
-                                               "WEBHDFS",
-                                               "WEBHCAT",
-                                               "OOZIE",
-                                               "WEBHBASE",
-                                               "HIVE",
-                                               "RESOURCEMANAGER"};
-        printServiceURLs(cluster, services);
-    }
-
-
-    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
-        for (String name : services) {
-            StringBuilder sb = new StringBuilder();
-            List<String> urls = cluster.getServiceURLs(name);
-            if (urls != null && !urls.isEmpty()) {
-                for (String url : urls) {
-                    sb.append(url);
-                    sb.append(" ");
-                }
-            }
-            System.out.println(String.format("%18s: %s", name, sb.toString()));
-        }
-    }
-
-
-    /**
-     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
-     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
-     */
-    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
-
-        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
-
-        private Map<String, JSONObject> cannedResponses = new HashMap<>();
-
-        TestAmbariServiceDiscovery(String clusterName) {
-            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
-                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                               clusterName)));
-
-            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
-                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                clusterName)));
-
-            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
-                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                     clusterName)));
-        }
-
-        @Override
-        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-            return cannedResponses.get(url.substring(url.indexOf("/api")));
-        }
-    }
-
-
-    ////////////////////////////////////////////////////////////////////////
-    //  JSON response templates, based on actual response content excerpts
-    ////////////////////////////////////////////////////////////////////////
-
-    private static final String CLUSTERS_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"Clusters\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"version\" : \"HDP-2.6\"\n" +
-    "      }\n" +
-    "    }\n" +
-    "  ]" +
-    "}";
-
-
-    private static final String HOSTROLES_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
-    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HBASE_MASTER\",\n" +
-    "            \"service_name\" : \"HBASE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HBASE_MASTER\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HBASE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"HDFS\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"NAMENODE\",\n" +
-    "            \"service_name\" : \"HDFS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"NAMENODE\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HDFS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
-    "            \"service_name\" : \"HDFS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HDFS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"HIVE\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HCAT\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HCAT\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HIVE_SERVER\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HIVE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"OOZIE\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
-    "            \"service_name\" : \"OOZIE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"OOZIE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"YARN\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"NODEMANAGER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"NODEMANAGER\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
-    "                \"ha_state\" : \"ACTIVE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"ZOOKEEPER\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "            \"service_name\" : \"ZOOKEEPER\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            },\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            },\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    }\n" +
-    "  ]\n" +
-    "}\n";
-
-
-    private static final String SERVICECONFIGS_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hbase-site\",\n" +
-    "          \"tag\" : \"version1503410563715\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
-    "            \"hbase.master.info.port\" : \"16010\",\n" +
-    "            \"hbase.master.port\" : \"16000\",\n" +
-    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
-    "            \"hbase.regionserver.port\" : \"16020\",\n" +
-    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
-    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
-    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
-    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
-    "      \"service_name\" : \"HBASE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hdfs-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
-    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
-    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
-    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
-    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
-    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
-    "            \"dfs.https.port\" : \"50470\",\n" +
-    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
-    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
-    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
-    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
-    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
-    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
-    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"final\" : {\n" +
-    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
-    "              \"dfs.namenode.http-address\" : \"true\",\n" +
-    "              \"dfs.support.append\" : \"true\",\n" +
-    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
-    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
-    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"core-site\",\n" +
-    "          \"tag\" : \"version1502131215159\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
-    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"final\" : {\n" +
-    "              \"fs.defaultFS\" : \"true\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 2,\n" +
-    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
-    "      \"service_name\" : \"HDFS\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-env\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive_security_authorization\" : \"None\",\n" +
-    "            \"webhcat_user\" : \"hcat\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hiveserver2-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
-    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
-    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
-    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-interactive-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
-    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
-    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
-    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
-    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
-    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
-    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
-    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
-    "            \"hive.server2.webui.port\" : \"10502\",\n" +
-    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
-    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"tez-interactive-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
-    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
-    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-site\",\n" +
-    "          \"tag\" : \"version1502130841736\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
-    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
-    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
-    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
-    "            \"hive.server2.authentication\" : \"NONE\",\n" +
-    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
-    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
-    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
-    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
-    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
-    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
-    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
-    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
-    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
-    "            \"hive.server2.transport.mode\" : \"http\",\n" +
-    "            \"hive.server2.use.SSL\" : \"false\",\n" +
-    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"hidden\" : {\n" +
-    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"webhcat-site\",\n" +
-    "          \"tag\" : \"version1502131111746\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"templeton.port\" : \"50111\",\n" +
-    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
-    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
-    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
-    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
-    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"createtime\" : 1502131110745,\n" +
-    "      \"group_id\" : -1,\n" +
-    "      \"group_name\" : \"Default\",\n" +
-    "      \"hosts\" : [ ],\n" +
-    "      \"is_cluster_compatible\" : true,\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 3,\n" +
-    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
-    "      \"service_name\" : \"HIVE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"oozie-site\",\n" +
-    "          \"tag\" : \"version1502131137103\",\n" +
-    "          \"version\" : 3,\n" +
-    "          \"properties\" : {\n" +
-    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 3,\n" +
-    "      \"service_name\" : \"OOZIE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"tez-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"createtime\" : 1502122253525,\n" +
-    "      \"group_id\" : -1,\n" +
-    "      \"group_name\" : \"Default\",\n" +
-    "      \"hosts\" : [ ],\n" +
-    "      \"is_cluster_compatible\" : true,\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
-    "      \"service_name\" : \"TEZ\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"yarn-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
-    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
-    "            \"yarn.acl.enable\" : \"false\",\n" +
-    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
-    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
-    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
-    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
-    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
-    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
-    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
-    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
-    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
-    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
-    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
-    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
-    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_name\" : \"YARN\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    }\n" +
-    "  ]\n" +
-    "}";
-
-}

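The test class above shows the pattern this suite relies on: subclass the discovery implementation and override its single REST entry point so canned JSON responses stand in for a live Ambari endpoint. Below is a minimal, self-contained sketch of that pattern; apart from the invokeREST name, the classes and fields are hypothetical illustrations, not part of the Knox codebase.

import java.util.HashMap;
import java.util.Map;

class RestBackedDiscovery {
    // In production, this would perform an HTTP GET and return the parsed body.
    protected String invokeREST(String url, String username, String passwordAlias) {
        throw new UnsupportedOperationException("requires a live endpoint");
    }
}

class CannedResponseDiscovery extends RestBackedDiscovery {
    private final Map<String, String> cannedResponses = new HashMap<>();

    CannedResponseDiscovery() {
        // Canned responses are keyed by the path portion of the request URL,
        // just as TestAmbariServiceDiscovery keys on the "/api" suffix.
        cannedResponses.put("/api/v1/clusters", "{ \"items\" : [ ] }");
    }

    @Override
    protected String invokeREST(String url, String username, String passwordAlias) {
        return cannedResponses.get(url.substring(url.indexOf("/api")));
    }
}
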
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/descriptors/README b/b/gateway-release/home/conf/descriptors/README
deleted file mode 100644
index a2e5226..0000000
--- a/b/gateway-release/home/conf/descriptors/README
+++ /dev/null
@@ -1 +0,0 @@
-THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/shared-providers/README b/b/gateway-release/home/conf/shared-providers/README
deleted file mode 100644
index 44d12a3..0000000
--- a/b/gateway-release/home/conf/shared-providers/README
+++ /dev/null
@@ -1 +0,0 @@
-THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file


[19/22] knox git commit: KNOX-998 - Merging from current master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
new file mode 100644
index 0000000..c4a3914
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.Service;
+import org.apache.knox.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryFactory;
+
+import java.io.*;
+import java.util.*;
+
+
+/**
+ * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
+ * gateway.
+ */
+public class SimpleDescriptorHandler {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+    private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
+
+    public static Map<String, File> handle(File desc) throws IOException {
+        return handle(desc, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException {
+        return handle(desc, desc.getParentFile(), gatewayServices);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
+        return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException {
+        return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
+        return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
+        Map<String, File> result = new HashMap<>();
+
+        File topologyDescriptor;
+
+        DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
+        sdc.setUser(desc.getDiscoveryUser());
+        sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
+        ServiceDiscovery sd = ServiceDiscoveryFactory
+            .get(desc.getDiscoveryType(), gatewayServices);
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
+
+        Map<String, List<String>> serviceURLs = new HashMap<>();
+
+        if (cluster != null) {
+            for (SimpleDescriptor.Service descService : desc.getServices()) {
+                String serviceName = descService.getName();
+
+                List<String> descServiceURLs = descService.getURLs();
+                if (descServiceURLs == null || descServiceURLs.isEmpty()) {
+                    descServiceURLs = cluster.getServiceURLs(serviceName);
+                }
+
+                // If there is at least one URL associated with the service, then add it to the map
+                if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
+                    serviceURLs.put(serviceName, descServiceURLs);
+                } else {
+                    log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
+                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
+                                                    ". Topology update aborted!");
+                }
+            }
+        } else {
+            log.failedToDiscoverClusterServices(desc.getClusterName());
+        }
+
+        topologyDescriptor = null;
+        File providerConfig = null;
+        try {
+            // Verify that the referenced provider configuration exists before attempting to read it
+            providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
+            if (providerConfig == null) {
+                log.failedToResolveProviderConfigRef(desc.getProviderConfig());
+                throw new IllegalArgumentException("Unresolved provider configuration reference: " +
+                                                   desc.getProviderConfig() + " ; Topology update aborted!");
+            }
+            result.put("reference", providerConfig);
+
+            // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
+
+            String topologyFilename = desc.getName();
+            if (topologyFilename == null) {
+                topologyFilename = desc.getClusterName();
+            }
+            topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
+            FileWriter fw = new FileWriter(topologyDescriptor);
+
+            fw.write("<topology>\n");
+
+            // Copy the externalized provider configuration content into the topology descriptor in-line
+            InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig));
+            char[] buffer = new char[1024];
+            int count;
+            while ((count = policyReader.read(buffer)) > 0) {
+                fw.write(buffer, 0, count);
+            }
+            policyReader.close();
+
+            // Write the service declarations
+            for (String serviceName : serviceURLs.keySet()) {
+                fw.write("    <service>\n");
+                fw.write("        <role>" + serviceName + "</role>\n");
+                for (String url : serviceURLs.get(serviceName)) {
+                    fw.write("        <url>" + url + "</url>\n");
+                }
+                fw.write("    </service>\n");
+            }
+
+            fw.write("</topology>\n");
+
+            fw.flush();
+            fw.close();
+        } catch (IOException e) {
+            log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
+            topologyDescriptor.delete();
+        }
+
+        result.put("topology", topologyDescriptor);
+        return result;
+    }
+
+
+    private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
+        File providerConfig;
+
+        // If the reference includes a path
+        if (reference.contains(File.separator)) {
+            // Check if it's an absolute path
+            providerConfig = new File(reference);
+            if (!providerConfig.exists()) {
+                // If it's not an absolute path, try treating it as a relative path
+                providerConfig = new File(srcDirectory, reference);
+                if (!providerConfig.exists()) {
+                    providerConfig = null;
+                }
+            }
+        } else { // No file path, just a name
+            // Check if it's co-located with the referencing descriptor
+            providerConfig = new File(srcDirectory, reference);
+            if (!providerConfig.exists()) {
+                // Check the shared-providers config location
+                File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
+                if (sharedProvidersDir.exists()) {
+                    providerConfig = new File(sharedProvidersDir, reference);
+                    if (!providerConfig.exists()) {
+                        // Check if it's a valid name without the extension
+                        providerConfig = new File(sharedProvidersDir, reference + ".xml");
+                        if (!providerConfig.exists()) {
+                            providerConfig = null;
+                        }
+                    }
+                }
+            }
+        }
+
+        return providerConfig;
+    }
+
+}

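For context, a hypothetical caller of the handler above could look like the sketch below; the descriptor path is illustrative only. handle(File) parses the simple descriptor, performs service discovery, and writes <name>.xml next to the descriptor, returning the resolved provider configuration under the "reference" key and the generated topology under the "topology" key. Note the resolution order implemented by resolveProviderConfigurationReference: a reference containing a path is tried as absolute and then relative to the descriptor's directory, while a bare name is tried next to the descriptor and then in the sibling shared-providers directory, with and without an .xml extension.

import java.io.File;
import java.io.IOException;
import java.util.Map;

import org.apache.knox.gateway.topology.simple.SimpleDescriptorHandler;

public class SimpleDescriptorHandlerExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical location; any parseable simple descriptor would do.
        File descriptor = new File("conf/descriptors/sandbox.json");
        Map<String, File> files = SimpleDescriptorHandler.handle(descriptor);
        System.out.println("provider config: " + files.get("reference"));
        System.out.println("topology:        " + files.get("topology"));
    }
}
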
http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorImpl.java
new file mode 100644
index 0000000..0ec7acf
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorImpl.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+class SimpleDescriptorImpl implements SimpleDescriptor {
+
+    @JsonProperty("discovery-type")
+    private String discoveryType;
+
+    @JsonProperty("discovery-address")
+    private String discoveryAddress;
+
+    @JsonProperty("discovery-user")
+    private String discoveryUser;
+
+    @JsonProperty("discovery-pwd-alias")
+    private String discoveryPasswordAlias;
+
+    @JsonProperty("provider-config-ref")
+    private String providerConfig;
+
+    @JsonProperty("cluster")
+    private String cluster;
+
+    @JsonProperty("services")
+    private List<ServiceImpl> services;
+
+    private String name = null;
+
+    void setName(String name) {
+        this.name = name;
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    @Override
+    public String getDiscoveryType() {
+        return discoveryType;
+    }
+
+    @Override
+    public String getDiscoveryAddress() {
+        return discoveryAddress;
+    }
+
+    @Override
+    public String getDiscoveryUser() {
+        return discoveryUser;
+    }
+
+    @Override
+    public String getDiscoveryPasswordAlias() {
+        return discoveryPasswordAlias;
+    }
+
+    @Override
+    public String getClusterName() {
+        return cluster;
+    }
+
+    @Override
+    public String getProviderConfig() {
+        return providerConfig;
+    }
+
+    @Override
+    public List<Service> getServices() {
+        List<Service> result = new ArrayList<>();
+        result.addAll(services);
+        return result;
+    }
+
+    public static class ServiceImpl implements Service {
+        private String name;
+        private List<String> urls;
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getURLs() {
+            return urls;
+        }
+    }
+
+}

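The @JsonProperty annotations above define the on-disk format of a simple descriptor. A hypothetical JSON descriptor that would bind to SimpleDescriptorImpl follows; all values are illustrative (the "AMBARI" discovery type assumes the Ambari-based discovery implementation registers under that name), and a service may omit "urls" so that its endpoints are resolved by discovery instead.

{
  "discovery-type" : "AMBARI",
  "discovery-address" : "http://c6401.ambari.apache.org:8080",
  "discovery-user" : "ambariuser",
  "discovery-pwd-alias" : "ambari.discovery.password",
  "provider-config-ref" : "sandbox-providers",
  "cluster" : "Sandbox",
  "services" : [
    { "name" : "NAMENODE" },
    { "name" : "WEBHDFS" },
    { "name" : "OOZIE", "urls" : [ "http://c6402.ambari.apache.org:11000/oozie" ] }
  ]
}
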
http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
new file mode 100644
index 0000000..eb9d887
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.knox.gateway.topology.simple")
+public interface SimpleDescriptorMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Service discovery for cluster {0} failed.")
+    void failedToDiscoverClusterServices(final String cluster);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No URLs were discovered for {0} in the {1} cluster.")
+    void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Failed to resolve the referenced provider configuration {0}.")
+    void failedToResolveProviderConfigRef(final String providerConfigRef);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Error generating topology {0} from simple descriptor: {1}")
+    void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
+                                                      @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
index 7cc9971..eba98a4 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
@@ -15,19 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.gateway.services.token.impl;
+package org.apache.knox.gateway.services.token.impl;
 
 import java.io.File;
 import java.security.Principal;
 import java.util.HashMap;
 
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.KeystoreService;
-import org.apache.hadoop.gateway.services.security.MasterService;
-import org.apache.hadoop.gateway.services.security.impl.DefaultKeystoreService;
-import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
-import org.apache.hadoop.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.KeystoreService;
+import org.apache.knox.gateway.services.security.MasterService;
+import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
 import org.easymock.EasyMock;
 import org.junit.Test;
 

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
deleted file mode 100644
index 269bed2..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
-import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import static org.junit.Assert.*;
-
-
-public class PropertiesFileServiceDiscoveryTest {
-
-    private static final Map<String, String> clusterProperties = new HashMap<>();
-    static {
-        clusterProperties.put("mycluster.name", "mycluster");
-        clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020");
-        clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050");
-        clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton");
-        clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie");
-        clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath");
-        clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws");
-    }
-
-    private static final Properties config = new Properties();
-    static {
-        for (String name : clusterProperties.keySet()) {
-            config.setProperty(name, clusterProperties.get(name));
-        }
-    }
-
-
-    @Test
-    public void testPropertiesFileServiceDiscovery() throws Exception {
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE");
-        assertNotNull(sd);
-
-        String discoveryAddress = this.getClass().getName() + "__test-discovery-source.properties";
-        File discoverySource = new File(discoveryAddress);
-        try {
-            config.store(new FileOutputStream(discoverySource), "Test discovery source for PropertiesFileServiceDiscovery");
-
-            ServiceDiscovery.Cluster c =
-                        sd.discover(new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster");
-            assertNotNull(c);
-            for (String name : clusterProperties.keySet()) {
-                assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0));
-            }
-        } finally {
-            discoverySource.delete();
-        }
-    }
-
-
-    private void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
-        for (String name : services) {
-            String value = "";
-            List<String> urls = cluster.getServiceURLs(name);
-            if (urls != null && !urls.isEmpty()) {
-                for (String url : urls) {
-                    value += url + " ";
-                }
-            }
-            System.out.println(String.format("%18s: %s", name, value));
-        }
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
deleted file mode 100644
index d592ede..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery;
-
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
-import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType;
-import org.junit.Test;
-
-import java.lang.reflect.Field;
-
-import static org.junit.Assert.*;
-
-
-public class ServiceDiscoveryFactoryTest {
-
-    @Test
-    public void testGetDummyImpl() throws Exception {
-        String TYPE = "DUMMY";
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
-        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
-        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
-    }
-
-
-    @Test
-    public void testGetDummyImplWithMismatchedCase() throws Exception {
-        String TYPE = "dUmmY";
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
-        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
-        assertEquals("Unexpected ServiceDiscovery type.", TYPE.toUpperCase(), sd.getType());
-    }
-
-
-    @Test
-    public void testGetInvalidImpl() throws Exception {
-        String TYPE = "InValID";
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
-        assertNull("Unexpected ServiceDiscovery object.", sd);
-    }
-
-
-    @Test
-    public void testGetImplWithMismatchedType() throws Exception {
-        String TYPE = "DeclaredType";
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
-        assertNull("Unexpected ServiceDiscovery object.", sd);
-    }
-
-
-    @Test
-    public void testGetPropertiesFileImplWithAliasServiceInjection() throws Exception {
-        String TYPE = "PROPERTIES_FILE";
-        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE, new DefaultAliasService());
-        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
-        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
-
-        // Verify that the AliasService was injected as expected
-        Field aliasServiceField = sd.getClass().getDeclaredField("aliasService");
-        aliasServiceField.setAccessible(true);
-        Object fieldValue = aliasServiceField.get(sd);
-        assertNotNull(fieldValue);
-        assertTrue(AliasService.class.isAssignableFrom(fieldValue.getClass()));
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
deleted file mode 100644
index 4a5323e..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.test.extension;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * This implementation is intended to be used by tests for which the actual service URLs are of no importance, such that
- * tests can be written without having a valid service registry (e.g., Ambari) available.
- */
-public class DummyServiceDiscovery implements ServiceDiscovery {
-
-    static final String TYPE = "DUMMY";
-
-    private static final Cluster DUMMY = new Cluster() {
-        @Override
-        public String getName() {
-            return "dummy";
-        }
-
-        @Override
-        public List<String> getServiceURLs(String serviceName) {
-            return Collections.singletonList("http://servicehost:9999/dummy");
-        }
-    };
-
-    private static final Map<String, Cluster> CLUSTERS = new HashMap<>();
-    static {
-        CLUSTERS.put(DUMMY.getName(), DUMMY);
-    }
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-    @Override
-    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-        return CLUSTERS;
-    }
-
-    @Override
-    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-        return DUMMY;
-    }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
deleted file mode 100644
index d47c38d..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.test.extension;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-
-public class DummyServiceDiscoveryType implements ServiceDiscoveryType {
-    @Override
-    public String getType() {
-        return DummyServiceDiscovery.TYPE;
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-        return new DummyServiceDiscovery();
-    }
-}
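DummyServiceDiscoveryType and the other *ServiceDiscoveryType classes in this package are test-only extensions, and the tests elsewhere in this diff note that they must be "configured" to be found. That phrasing suggests the factory locates implementations through the standard java.util.ServiceLoader mechanism; under that assumption, the registration would be a provider-configuration file on the test classpath listing one implementation per line:

    # META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
    org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
    org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType
    org.apache.hadoop.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType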

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
deleted file mode 100644
index a7fc34a..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.test.extension;
-
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.topology.discovery.GatewayService;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.*;
-
-class PropertiesFileServiceDiscovery implements ServiceDiscovery {
-
-    static final String TYPE = "PROPERTIES_FILE";
-
-    @GatewayService
-    AliasService aliasService;
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-    @Override
-    public Map<String, ServiceDiscovery.Cluster> discover(ServiceDiscoveryConfig config) {
-
-        Map<String, ServiceDiscovery.Cluster> result = new HashMap<>();
-
-        Properties p = new Properties();
-        try {
-            p.load(new FileInputStream(config.getAddress()));
-
-            Map<String, Map<String, List<String>>> clusters = new HashMap<>();
-            for (Object key : p.keySet()) {
-                String propertyKey = (String)key;
-                String[] parts = propertyKey.split("\\.");
-                if (parts.length == 2) {
-                    String clusterName = parts[0];
-                    String serviceName = parts[1];
-                    String serviceURL  = p.getProperty(propertyKey);
-                    if (!clusters.containsKey(clusterName)) {
-                        clusters.put(clusterName, new HashMap<String, List<String>>());
-                    }
-                    Map<String, List<String>> serviceURLs = clusters.get(clusterName);
-                    if (!serviceURLs.containsKey(serviceName)) {
-                        serviceURLs.put(serviceName, new ArrayList<String>());
-                    }
-                    serviceURLs.get(serviceName).add(serviceURL);
-                }
-            }
-
-            for (String clusterName : clusters.keySet()) {
-                result.put(clusterName,
-                        new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName)));
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        return result;
-    }
-
-
-    @Override
-    public ServiceDiscovery.Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-        Map<String, ServiceDiscovery.Cluster> clusters = discover(config);
-        return clusters.get(clusterName);
-    }
-
-
-    static class Cluster implements ServiceDiscovery.Cluster {
-        private String name;
-        private Map<String, List<String>> serviceURLS = new HashMap<>();
-
-        Cluster(String name, Map<String, List<String>> serviceURLs) {
-            this.name = name;
-            this.serviceURLS.putAll(serviceURLs);
-        }
-
-        @Override
-        public String getName() {
-            return name;
-        }
-
-        @Override
-        public List<String> getServiceURLs(String serviceName) {
-            return serviceURLS.get(serviceName);
-        }
-    }
-
-}
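The discover() implementation above splits each property key on "." and uses only keys with exactly two segments, treating them as <cluster>.<SERVICE>=<URL> and grouping multiple entries per service into a URL list. Reconstructed from that parsing logic, a discovery source is just a flat properties file:

    # <cluster>.<SERVICE>=<service URL>
    mycluster.NAMENODE=hdfs://namenodehost:8020
    mycluster.WEBHCAT=http://webhcathost:50111/templeton
    mycluster.HIVE=http://hivehostname:10001/clipath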

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
deleted file mode 100644
index 2cfd998..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.test.extension;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-
-public class PropertiesFileServiceDiscoveryType implements ServiceDiscoveryType {
-
-    @Override
-    public String getType() {
-        return PropertiesFileServiceDiscovery.TYPE;
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-        return new PropertiesFileServiceDiscovery();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
deleted file mode 100644
index 8f7b71a..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.test.extension;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-
-import java.util.Map;
-
-public class SneakyServiceDiscoveryImpl implements ServiceDiscovery {
-    @Override
-    public String getType() {
-        return "ActualType";
-    }
-
-    @Override
-    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-        return null;
-    }
-
-    @Override
-    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
deleted file mode 100644
index 97665dc..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.test.extension;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-
-public class SneakyServiceDiscoveryType implements ServiceDiscoveryType {
-    @Override
-    public String getType() {
-        return "DeclaredType";
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-        return new SneakyServiceDiscoveryImpl();
-    }
-}
-
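The "sneaky" pair above declares "DeclaredType" via its ServiceDiscoveryType while the instance it creates reports "ActualType", and ServiceDiscoveryFactoryTest expects a lookup for the declared name to return null. A sketch of the guard this implies, assuming the factory iterates ServiceLoader results and cross-checks the instance's self-reported type (a hypothetical method body, not the actual factory source):

    // Hypothetical: match the requested type case-insensitively against the
    // declared type, then reject implementations whose instances report a
    // different type than they declared.
    public static ServiceDiscovery get(String type) {
        for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
            if (sdt.getType().equalsIgnoreCase(type)) {
                ServiceDiscovery instance = sdt.newInstance();
                if (instance.getType().equalsIgnoreCase(type)) {
                    return instance;
                }
            }
        }
        return null; // unknown type, or a declared/actual mismatch
    }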

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
deleted file mode 100644
index 3dac66a..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.Writer;
-import java.util.*;
-
-import org.junit.Test;
-import static org.junit.Assert.*;
-
-
-public class SimpleDescriptorFactoryTest {
-
-
-    @Test
-    public void testParseJSONSimpleDescriptor() throws Exception {
-
-        final String   discoveryType    = "AMBARI";
-        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
-        final String   discoveryUser    = "admin";
-        final String   providerConfig   = "ambari-cluster-policy.xml";
-        final String   clusterName      = "myCluster";
-
-        final Map<String, List<String>> services = new HashMap<>();
-        services.put("NODEMANAGER", null);
-        services.put("JOBTRACKER", null);
-        services.put("RESOURCEMANAGER", null);
-        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
-        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
-
-        String fileName = "test-topology.json";
-        File testJSON = null;
-        try {
-            testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
-            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
-            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            if (testJSON != null) {
-                try {
-                    testJSON.delete();
-                } catch (Exception e) {
-                    // Ignore
-                }
-            }
-        }
-    }
-
-    @Test
-    public void testParseYAMLSimpleDescriptor() throws Exception {
-
-        final String   discoveryType    = "AMBARI";
-        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
-        final String   discoveryUser    = "joeblow";
-        final String   providerConfig   = "ambari-cluster-policy.xml";
-        final String   clusterName      = "myCluster";
-
-        final Map<String, List<String>> services = new HashMap<>();
-        services.put("NODEMANAGER", null);
-        services.put("JOBTRACKER", null);
-        services.put("RESOURCEMANAGER", null);
-        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
-        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
-
-        String fileName = "test-topology.yml";
-        File testYAML = null;
-        try {
-            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
-            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
-            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            if (testYAML != null) {
-                try {
-                    testYAML.delete();
-                } catch (Exception e) {
-                    // Ignore
-                }
-            }
-        }
-    }
-
-
-    private void validateSimpleDescriptor(SimpleDescriptor    sd,
-                                          String              discoveryType,
-                                          String              discoveryAddress,
-                                          String              providerConfig,
-                                          String              clusterName,
-                                          Map<String, List<String>> expectedServices) {
-        assertNotNull(sd);
-        assertEquals(discoveryType, sd.getDiscoveryType());
-        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
-        assertEquals(providerConfig, sd.getProviderConfig());
-        assertEquals(clusterName, sd.getClusterName());
-
-        List<SimpleDescriptor.Service> actualServices = sd.getServices();
-
-        assertEquals(expectedServices.size(), actualServices.size());
-
-        for (SimpleDescriptor.Service actualService : actualServices) {
-            assertTrue(expectedServices.containsKey(actualService.getName()));
-            assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
-        }
-    }
-
-
-    private File writeJSON(String path, String content) throws Exception {
-        File f = new File(path);
-
-        Writer fw = new FileWriter(f);
-        fw.write(content);
-        fw.flush();
-        fw.close();
-
-        return f;
-    }
-
-
-    private File writeJSON(String path,
-                           String discoveryType,
-                           String discoveryAddress,
-                           String discoveryUser,
-                           String providerConfig,
-                           String clusterName,
-                           Map<String, List<String>> services) throws Exception {
-        File f = new File(path);
-
-        Writer fw = new FileWriter(f);
-        fw.write("{" + "\n");
-        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
-        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
-        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
-        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
-        fw.write("\"cluster\":\"" + clusterName + "\",\n");
-        fw.write("\"services\":[\n");
-
-        int i = 0;
-        for (String name : services.keySet()) {
-            fw.write("{\"name\":\"" + name + "\"");
-            List<String> urls = services.get(name);
-            if (urls != null) {
-                fw.write(", \"urls\":[");
-                Iterator<String> urlIter = urls.iterator();
-                while (urlIter.hasNext()) {
-                    fw.write("\"" + urlIter.next() + "\"");
-                    if (urlIter.hasNext()) {
-                        fw.write(", ");
-                    }
-                }
-                fw.write("]");
-            }
-            fw.write("}");
-            if (i++ < services.size() - 1) {
-                fw.write(",");
-            }
-            fw.write("\n");
-        }
-        fw.write("]\n");
-        fw.write("}\n");
-        fw.flush();
-        fw.close();
-
-        return f;
-    }
-
-    private File writeYAML(String path,
-                           String discoveryType,
-                           String discoveryAddress,
-                           String discoveryUser,
-                           String providerConfig,
-                           String clusterName,
-                           Map<String, List<String>> services) throws Exception {
-        File f = new File(path);
-
-        Writer fw = new FileWriter(f);
-        fw.write("---" + "\n");
-        fw.write("discovery-type: " + discoveryType + "\n");
-        fw.write("discovery-address: " + discoveryAddress + "\n");
-        fw.write("discovery-user: " + discoveryUser + "\n");
-        fw.write("provider-config-ref: " + providerConfig + "\n");
-        fw.write("cluster: " + clusterName+ "\n");
-        fw.write("services:\n");
-        for (String name : services.keySet()) {
-            fw.write("    - name: " + name + "\n");
-            List<String> urls = services.get(name);
-            if (urls != null) {
-                fw.write("      urls:\n");
-                for (String url : urls) {
-                    fw.write("          - " + url + "\n");
-                }
-            }
-        }
-        fw.flush();
-        fw.close();
-
-        return f;
-    }
-
-
-}
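The writeJSON and writeYAML helpers above double as documentation for the two formats SimpleDescriptorFactory.parse() accepts. Reconstructed from writeYAML with the test's constants, the YAML flavor reads:

    ---
    discovery-type: AMBARI
    discovery-address: http://c6401.ambari.apache.org:8080
    discovery-user: joeblow
    provider-config-ref: ambari-cluster-policy.xml
    cluster: myCluster
    services:
        - name: HIVE
          urls:
              - http://c6401.ambari.apache.org
              - http://c6402.ambari.apache.org
              - http://c6403.ambari.apache.org
        - name: RESOURCEMANAGER

The JSON flavor carries the same fields, with each service rendered as an object holding a "name" and an optional "urls" array.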

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
deleted file mode 100644
index 90c7146..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.simple;
-
-import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
-import org.apache.hadoop.gateway.util.XmlUtils;
-import org.easymock.EasyMock;
-import org.junit.Test;
-import org.w3c.dom.Document;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.xml.sax.SAXException;
-
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathFactory;
-import java.io.*;
-import java.util.*;
-
-import static org.junit.Assert.*;
-
-
-public class SimpleDescriptorHandlerTest {
-
-    private static final String TEST_PROVIDER_CONFIG =
-            "    <gateway>\n" +
-                    "        <provider>\n" +
-                    "            <role>authentication</role>\n" +
-                    "            <name>ShiroProvider</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "            <param>\n" +
-                    "                <!-- \n" +
-                    "                session timeout in minutes; this is really an idle timeout,\n" +
-                    "                defaulting to 30 minutes if the property value is not defined;\n" +
-                    "                the current client authentication would expire if the client idles continuously for more than this value\n" +
-                    "                -->\n" +
-                    "                <name>sessionTimeout</name>\n" +
-                    "                <value>30</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm</name>\n" +
-                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapContextFactory</name>\n" +
-                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory</name>\n" +
-                    "                <value>$ldapContextFactory</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
-                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
-                    "                <value>ldap://localhost:33389</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
-                    "                <value>simple</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>urls./**</name>\n" +
-                    "                <value>authcBasic</value>\n" +
-                    "            </param>\n" +
-                    "        </provider>\n" +
-                    "\n" +
-                    "        <provider>\n" +
-                    "            <role>identity-assertion</role>\n" +
-                    "            <name>Default</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "        </provider>\n" +
-                    "\n" +
-                    "        <!--\n" +
-                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
-                    "        For example, a Hadoop service running in AWS may return a response that includes URLs containing\n" +
-                    "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
-                    "        in those URLs, they need to be mapped to external host names that the client can use to connect.\n" +
-                    "\n" +
-                    "        If the external and internal host names are the same, turn off this provider by setting the value of\n" +
-                    "        the enabled parameter to false.\n" +
-                    "\n" +
-                    "        The name parameter specifies the external host names in a comma-separated list.\n" +
-                    "        The value parameter specifies the corresponding internal host names in a comma-separated list.\n" +
-                    "\n" +
-                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the\n" +
-                    "        out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
-                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
-                    "        -->\n" +
-                    "        <provider>\n" +
-                    "            <role>hostmap</role>\n" +
-                    "            <name>static</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
-                    "        </provider>\n" +
-                    "    </gateway>\n";
-
-
-    /**
-     * KNOX-1006
-     *
-     * N.B. This test depends on the DummyServiceDiscovery extension being configured:
-     *             org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
-     */
-    @Test
-    public void testSimpleDescriptorHandler() throws Exception {
-
-        final String type = "DUMMY";
-        final String address = "http://c6401.ambari.apache.org:8080";
-        final String clusterName = "dummy";
-        final Map<String, List<String>> serviceURLs = new HashMap<>();
-        serviceURLs.put("NAMENODE", null);
-        serviceURLs.put("JOBTRACKER", null);
-        serviceURLs.put("WEBHDFS", null);
-        serviceURLs.put("WEBHCAT", null);
-        serviceURLs.put("OOZIE", null);
-        serviceURLs.put("WEBHBASE", null);
-        serviceURLs.put("HIVE", null);
-        serviceURLs.put("RESOURCEMANAGER", null);
-        serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
-
-        // Write the externalized provider config to a temp file
-        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
-
-        File topologyFile = null;
-        try {
-            File destDir = (new File(".")).getCanonicalFile();
-
-            // Mock out the simple descriptor
-            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
-            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
-            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
-            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
-            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
-            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
-            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
-            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
-            for (String serviceName : serviceURLs.keySet()) {
-                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
-                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
-                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
-                EasyMock.replay(svc);
-                serviceMocks.add(svc);
-            }
-            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
-            EasyMock.replay(testDescriptor);
-
-            // Invoke the simple descriptor handler
-            Map<String, File> files =
-                           SimpleDescriptorHandler.handle(testDescriptor,
-                                                          providerConfig.getParentFile(), // simple desc co-located with provider config
-                                                          destDir);
-            topologyFile = files.get("topology");
-
-            // Validate the resulting topology descriptor
-            assertTrue(topologyFile.exists());
-
-            // Validate the topology descriptor's correctness
-            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
-            if( !validator.validateTopology() ){
-                throw new SAXException( validator.getErrorString() );
-            }
-
-            XPathFactory xPathfactory = XPathFactory.newInstance();
-            XPath xpath = xPathfactory.newXPath();
-
-            // Parse the topology descriptor
-            Document topologyXml = XmlUtils.readXml(topologyFile);
-
-            // Validate the provider configuration
-            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
-            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
-            assertTrue("Resulting provider config should be identical to the referenced content.",
-                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
-
-            // Validate the service declarations
-            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
-            NodeList serviceNodes =
-                        (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
-            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
-                Node serviceNode = serviceNodes.item(serviceNodeIndex);
-                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
-                assertNotNull(roleNode);
-                String role = roleNode.getNodeValue();
-                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
-                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
-                    Node urlNode = urlNodes.item(urlNodeIndex);
-                    assertNotNull(urlNode);
-                    String url = urlNode.getNodeValue();
-                    assertNotNull("Every declared service should have a URL.", url);
-                    if (!topologyServiceURLs.containsKey(role)) {
-                        topologyServiceURLs.put(role, new ArrayList<String>());
-                    }
-                    topologyServiceURLs.get(role).add(url);
-                }
-            }
-            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
-
-        } catch (Exception e) {
-            e.printStackTrace();
-            fail(e.getMessage());
-        } finally {
-            providerConfig.delete();
-            if (topologyFile != null) {
-                topologyFile.delete();
-            }
-        }
-    }
-
-
-    private File writeProviderConfig(String path, String content) throws IOException {
-        File f = new File(path);
-
-        Writer fw = new FileWriter(f);
-        fw.write(content);
-        fw.flush();
-        fw.close();
-
-        return f;
-    }
-
-}
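Condensed from the test above, the contract being exercised: SimpleDescriptorHandler.handle() takes the parsed descriptor, the directory containing the referenced provider configuration, and a destination directory, and returns the generated files keyed by role, with the topology under "topology":

    // From the test: resolve the provider config, discover service URLs via
    // the configured ServiceDiscovery type, and write the topology to destDir.
    Map<String, File> files =
            SimpleDescriptorHandler.handle(testDescriptor,
                                           providerConfig.getParentFile(),
                                           destDir);
    File topologyFile = files.get("topology");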

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
index 1e31151..d28ad7f 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -94,7 +94,7 @@ public class DefaultTopologyServiceTest {
 
     long time = topologyDir.lastModified();
     try {
-      createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
+      createFile(topologyDir, "one.xml", "org/apache/knox/gateway/topology/file/topology-one.xml", time);
 
       TestTopologyListener topoListener = new TestTopologyListener();
       FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
@@ -123,7 +123,7 @@ public class DefaultTopologyServiceTest {
       topoListener.events.clear();
 
       // Add a file to the directory.
-      File two = createFile(dir, "two.xml",
+      File two = createFile(topologyDir, "two.xml",
           "org/apache/knox/gateway/topology/file/topology-two.xml", 1L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
@@ -145,7 +145,7 @@ public class DefaultTopologyServiceTest {
       assertThat(event.getTopology(), notNullValue());
 
       // Update a file in the directory.
-      two = createFile(dir, "two.xml",
+      two = createFile(topologyDir, "two.xml",
           "org/apache/knox/gateway/topology/file/topology-three.xml", 2L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
@@ -171,7 +171,7 @@ public class DefaultTopologyServiceTest {
 
       // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
       // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
-      //         org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+      //         org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
       AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
       EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
       EasyMock.replay(aliasService);
@@ -181,14 +181,14 @@ public class DefaultTopologyServiceTest {
       // Write out the referenced provider config first
       File provCfgFile = createFile(sharedProvidersDir,
                                     "ambari-cluster-policy.xml",
-                                    "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
+          "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml",
                                     1L);
       try {
         // Create the simple descriptor in the descriptors dir
         File simpleDesc =
                 createFile(descriptorsDir,
                            "four.json",
-                           "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
+                    "org/apache/knox/gateway/topology/file/simple-topology-four.json",
                            1L);
 
         // Trigger the topology generation by noticing the simple descriptor

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
new file mode 100644
index 0000000..ca36cd8
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+import org.apache.knox.gateway.services.security.impl.DefaultAliasService;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.junit.Assert.*;
+
+
+public class PropertiesFileServiceDiscoveryTest {
+
+    private static final Map<String, String> clusterProperties = new HashMap<>();
+    static {
+        clusterProperties.put("mycluster.name", "mycluster");
+        clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020");
+        clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050");
+        clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton");
+        clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie");
+        clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath");
+        clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws");
+    }
+
+    private static final Properties config = new Properties();
+    static {
+        for (String name : clusterProperties.keySet()) {
+            config.setProperty(name, clusterProperties.get(name));
+        }
+    }
+
+
+    @Test
+    public void testPropertiesFileServiceDiscovery() throws Exception {
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE");
+        assertNotNull(sd);
+
+        String discoveryAddress = this.getClass().getName() + "__test-discovery-source.properties";
+        File discoverySource = new File(discoveryAddress);
+        try {
+            config.store(new FileOutputStream(discoverySource), "Test discovery source for PropertiesFileServiceDiscovery");
+
+            ServiceDiscovery.Cluster c =
+                        sd.discover(new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster");
+            assertNotNull(c);
+            for (String name : clusterProperties.keySet()) {
+                assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0));
+            }
+        } finally {
+            discoverySource.delete();
+        }
+    }
+
+
+    private void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            String value = "";
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    value += url + " ";
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, value));
+        }
+    }
+
+
+}
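
Note: the discovery source written by this test is an ordinary java.util.Properties file whose keys follow
the <clusterName>.<SERVICE_ROLE> convention parsed by PropertiesFileServiceDiscovery. An illustrative excerpt
of the generated content (values taken from the clusterProperties map above):

    mycluster.name=mycluster
    mycluster.NAMENODE=hdfs://namenodehost:8020
    mycluster.WEBHCAT=http://webhcathost:50111/templeton
    mycluster.RESOURCEMANAGER=http://remanhost:8088/ws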

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
new file mode 100644
index 0000000..470d8b6
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery;
+
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.impl.DefaultAliasService;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+
+import static org.junit.Assert.*;
+
+
+public class ServiceDiscoveryFactoryTest {
+
+    @Test
+    public void testGetDummyImpl() throws Exception {
+        String TYPE = "DUMMY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+    }
+
+
+    @Test
+    public void testGetDummyImplWithMismatchedCase() throws Exception {
+        String TYPE = "dUmmY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE.toUpperCase(), sd.getType());
+    }
+
+
+    @Test
+    public void testGetInvalidImpl() throws Exception {
+        String TYPE = "InValID";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetImplWithMismatchedType() throws Exception {
+        String TYPE = "DeclaredType";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetPropertiesFileImplWithAliasServiceInjection() throws Exception {
+        String TYPE = "PROPERTIES_FILE";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE, new DefaultAliasService());
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+
+        // Verify that the AliasService was injected as expected
+        Field aliasServiceField = sd.getClass().getDeclaredField("aliasService");
+        aliasServiceField.setAccessible(true);
+        Object fieldValue = aliasServiceField.get(sd);
+        assertNotNull(fieldValue);
+        assertTrue(AliasService.class.isAssignableFrom(fieldValue.getClass()));
+    }
+
+
+}
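
Note: taken together, these tests suggest the intended call pattern for the factory: resolve an implementation
by its type string and, optionally, pass gateway services to be injected into @GatewayService-annotated fields.
A minimal sketch based only on the calls exercised above (the file path and cluster/service names are
placeholders):

    ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE", new DefaultAliasService());
    ServiceDiscovery.Cluster cluster =
        sd.discover(new DefaultServiceDiscoveryConfig("/path/to/source.properties"), "mycluster");
    List<String> namenodeURLs = (cluster != null) ? cluster.getServiceURLs("NAMENODE") : null;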

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
new file mode 100644
index 0000000..1758d25
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.test.extension;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This implementation is intended for tests in which the actual service URLs are unimportant, so that tests
+ * can be written without a live service registry (e.g., Ambari) being available.
+ */
+public class DummyServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "DUMMY";
+
+    private static final Cluster DUMMY = new Cluster() {
+        @Override
+        public String getName() {
+            return "dummy";
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return Collections.singletonList("http://servicehost:9999/dummy");
+        }
+    };
+
+    private static final Map<String, Cluster> CLUSTERS = new HashMap<>();
+    static {
+        CLUSTERS.put(DUMMY.getName(), DUMMY);
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return CLUSTERS;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return DUMMY;
+    }
+}
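
Note: because both discover() overloads ignore their arguments, every lookup against this type resolves to the
same placeholder cluster and URL. For example (config here stands for any ServiceDiscoveryConfig instance):

    ServiceDiscovery sd = ServiceDiscoveryFactory.get("DUMMY");
    // Any cluster name and any service name yield the fixed placeholder URL.
    String url = sd.discover(config, "anything").getServiceURLs("NAMENODE").get(0);
    // url is "http://servicehost:9999/dummy"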

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
new file mode 100644
index 0000000..3ab311e
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.test.extension;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class DummyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return DummyServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new DummyServiceDiscovery();
+    }
+}
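
Note: the factory presumably locates ServiceDiscoveryType implementations such as this one via Java's
ServiceLoader mechanism; assuming that is the mechanism in play, the test classpath would also need a
provider-configuration file along these lines:

    # META-INF/services/org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType
    org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
    org.apache.knox.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType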

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
new file mode 100644
index 0000000..bd3823f
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.discovery.test.extension;
+
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.topology.discovery.GatewayService;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.*;
+
+class PropertiesFileServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "PROPERTIES_FILE";
+
+    @GatewayService
+    AliasService aliasService;
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, ServiceDiscovery.Cluster> discover(ServiceDiscoveryConfig config) {
+
+        Map<String, ServiceDiscovery.Cluster> result = new HashMap<>();
+
+        Properties p = new Properties();
+        try (FileInputStream fis = new FileInputStream(config.getAddress())) {
+            p.load(fis);
+
+            Map<String, Map<String, List<String>>> clusters = new HashMap<>();
+            for (Object key : p.keySet()) {
+                String propertyKey = (String)key;
+                String[] parts = propertyKey.split("\\.");
+                if (parts.length == 2) {
+                    String clusterName = parts[0];
+                    String serviceName = parts[1];
+                    String serviceURL  = p.getProperty(propertyKey);
+                    if (!clusters.containsKey(clusterName)) {
+                        clusters.put(clusterName, new HashMap<String, List<String>>());
+                    }
+                    Map<String, List<String>> serviceURLs = clusters.get(clusterName);
+                    if (!serviceURLs.containsKey(serviceName)) {
+                        serviceURLs.put(serviceName, new ArrayList<String>());
+                    }
+                    serviceURLs.get(serviceName).add(serviceURL);
+                }
+            }
+
+            for (String clusterName : clusters.keySet()) {
+                result.put(clusterName,
+                        new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName)));
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        return result;
+    }
+
+
+    @Override
+    public ServiceDiscovery.Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        Map<String, ServiceDiscovery.Cluster> clusters = discover(config);
+        return clusters.get(clusterName);
+    }
+
+
+    static class Cluster implements ServiceDiscovery.Cluster {
+        private String name;
+        private Map<String, List<String>> serviceURLS = new HashMap<>();
+
+        Cluster(String name, Map<String, List<String>> serviceURLs) {
+            this.name = name;
+            this.serviceURLS.putAll(serviceURLs);
+        }
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return serviceURLS.get(serviceName);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/668aea18/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
new file mode 100644
index 0000000..c3d9ad1
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.discovery.test.extension;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class PropertiesFileServiceDiscoveryType implements
+    ServiceDiscoveryType {
+
+    @Override
+    public String getType() {
+        return PropertiesFileServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new PropertiesFileServiceDiscovery();
+    }
+
+}


[04/22] knox git commit: KNOX-1060 - JWT.getExpires() returns null

Posted by mo...@apache.org.
KNOX-1060 - JWT.getExpires() returns null


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/8537d424
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/8537d424
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/8537d424

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 8537d424205dce5b032bbb4c37362d91dd3cfeb5
Parents: 935f81f
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 22 11:10:59 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Fri Sep 22 11:10:59 2017 +0100

----------------------------------------------------------------------
 .../services/security/token/impl/JWTToken.java        |  8 +++++++-
 .../services/security/token/impl/JWTTokenTest.java    | 14 ++++++++++++++
 2 files changed, 21 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/8537d424/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index b7b8649..567c156 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -22,6 +22,8 @@ import java.text.ParseException;
 import java.util.Date;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 
@@ -214,7 +216,11 @@ public class JWTToken implements JWT {
    */
   @Override
   public String getExpires() {
-    return getClaim(JWT.EXPIRES);
+    Date expires = getExpiresDate();
+    if (expires != null) {
+      return String.valueOf(expires.getTime());
+    }
+    return null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/knox/blob/8537d424/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
index 4ed2ecf..6372f0c 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
@@ -23,6 +23,7 @@ import java.security.NoSuchAlgorithmException;
 import java.security.interfaces.RSAPrivateKey;
 import java.security.interfaces.RSAPublicKey;
 import java.util.ArrayList;
+import java.util.Date;
 
 import org.junit.Test;
 
@@ -206,4 +207,17 @@ public class JWTTokenTest extends org.junit.Assert {
     assertTrue(token.verify(verifier));
   }
 
+  @Test
+  public void testTokenExpiry() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken("RS256", claims);
+
+    assertNotNull(token.getExpires());
+    assertNotNull(token.getExpiresDate());
+    assertEquals(token.getExpiresDate(), new Date(Long.valueOf(token.getExpires())));
+  }
 }
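
Note: the effect of this change is that getExpires() is now derived from getExpiresDate() instead of returning
the raw claim string, so callers get the expiration as epoch milliseconds, or null when no expiration claim is
set. A rough sketch of the fixed behavior, mirroring the new test:

    long expSeconds = (System.currentTimeMillis() / 1000) + 300;
    JWTToken token = new JWTToken("RS256", new String[] { "KNOXSSO",
        "john.doe@example.com", "https://login.example.com", Long.toString(expSeconds) });
    // Before this fix getExpires() returned null (KNOX-1060); it now returns the
    // epoch-millis form of the exp claim.
    assert Long.valueOf(token.getExpires()) == token.getExpiresDate().getTime();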


[15/22] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
index 1ef5e83,0000000..9f6f762
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
@@@ -1,448 -1,0 +1,673 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +package org.apache.knox.gateway.services.topology.impl;
 +
 +
 +import org.apache.commons.digester3.Digester;
 +import org.apache.commons.digester3.binder.DigesterLoader;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.FilenameUtils;
 +import org.apache.commons.io.monitor.FileAlterationListener;
 +import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
 +import org.apache.commons.io.monitor.FileAlterationMonitor;
 +import org.apache.commons.io.monitor.FileAlterationObserver;
 +import org.apache.knox.gateway.GatewayMessages;
 +import org.apache.knox.gateway.audit.api.Action;
 +import org.apache.knox.gateway.audit.api.ActionOutcome;
 +import org.apache.knox.gateway.audit.api.AuditServiceFactory;
 +import org.apache.knox.gateway.audit.api.Auditor;
 +import org.apache.knox.gateway.audit.api.ResourceType;
 +import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.service.definition.ServiceDefinition;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.knox.gateway.topology.Topology;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
 +import org.apache.knox.gateway.topology.TopologyMonitor;
 +import org.apache.knox.gateway.topology.TopologyProvider;
 +import org.apache.knox.gateway.topology.builder.TopologyBuilder;
 +import org.apache.knox.gateway.topology.validation.TopologyValidator;
 +import org.apache.knox.gateway.topology.xml.AmbariFormatXmlTopologyRules;
 +import org.apache.knox.gateway.topology.xml.KnoxFormatXmlTopologyRules;
 +import org.apache.knox.gateway.util.ServiceDefinitionsLoader;
++import org.apache.knox.gateway.services.security.AliasService;
++import org.apache.knox.gateway.topology.simple.SimpleDescriptorHandler;
 +import org.eclipse.persistence.jaxb.JAXBContextProperties;
 +import org.xml.sax.SAXException;
 +
 +import javax.xml.bind.JAXBContext;
 +import javax.xml.bind.JAXBException;
 +import javax.xml.bind.Marshaller;
 +import java.io.File;
 +import java.io.FileFilter;
 +import java.io.IOException;
 +import java.net.URISyntaxException;
 +import java.util.ArrayList;
 +import java.util.Collection;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +
 +import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
 +
 +
 +public class DefaultTopologyService
 +    extends FileAlterationListenerAdaptor
 +    implements TopologyService, TopologyMonitor, TopologyProvider, FileFilter, FileAlterationListener {
++
 +  private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(
 +    AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
 +    AuditConstants.KNOX_COMPONENT_NAME);
++
 +  private static final List<String> SUPPORTED_TOPOLOGY_FILE_EXTENSIONS = new ArrayList<String>();
 +  static {
 +    SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("xml");
 +    SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("conf");
 +  }
++
 +  private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
 +  private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
-   private FileAlterationMonitor monitor;
-   private File directory;
++  private List<FileAlterationMonitor> monitors = new ArrayList<>();
++  private File topologiesDirectory;
++  private File descriptorsDirectory;
++
 +  private Set<TopologyListener> listeners;
 +  private volatile Map<File, Topology> topologies;
++  private AliasService aliasService;
++
 +
 +  private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
 +    final long TIMEOUT = 250; //ms
 +    final long DELAY = 50; //ms
 +    log.loadingTopologyFile(file.getAbsolutePath());
 +    Topology topology;
 +    long start = System.currentTimeMillis();
 +    while (true) {
 +      try {
 +        topology = loadTopologyAttempt(file);
 +        break;
 +      } catch (IOException e) {
 +        if (System.currentTimeMillis() - start < TIMEOUT) {
 +          log.failedToLoadTopologyRetrying(file.getAbsolutePath(), Long.toString(DELAY), e);
 +          Thread.sleep(DELAY);
 +        } else {
 +          throw e;
 +        }
 +      } catch (SAXException e) {
 +        if (System.currentTimeMillis() - start < TIMEOUT) {
 +          log.failedToLoadTopologyRetrying(file.getAbsolutePath(), Long.toString(DELAY), e);
 +          Thread.sleep(DELAY);
 +        } else {
 +          throw e;
 +        }
 +      }
 +    }
 +    return topology;
 +  }
 +
 +  private Topology loadTopologyAttempt(File file) throws IOException, SAXException, URISyntaxException {
 +    Topology topology;
 +    Digester digester = digesterLoader.newDigester();
 +    TopologyBuilder topologyBuilder = digester.parse(FileUtils.openInputStream(file));
 +    if (null == topologyBuilder) {
 +      return null;
 +    }
 +    topology = topologyBuilder.build();
 +    topology.setUri(file.toURI());
 +    topology.setName(FilenameUtils.removeExtension(file.getName()));
 +    topology.setTimestamp(file.lastModified());
 +    return topology;
 +  }
 +
 +  private void redeployTopology(Topology topology) {
 +    File topologyFile = new File(topology.getUri());
 +    try {
 +      TopologyValidator tv = new TopologyValidator(topology);
 +
 +      if(tv.validateTopology()) {
 +        throw new SAXException(tv.getErrorString());
 +      }
 +
 +      long start = System.currentTimeMillis();
 +      long limit = 1000L; // One second.
 +      long elapsed = 1;
 +      while (elapsed <= limit) {
 +        try {
 +          long origTimestamp = topologyFile.lastModified();
 +          long setTimestamp = Math.max(System.currentTimeMillis(), topologyFile.lastModified() + elapsed);
 +          if(topologyFile.setLastModified(setTimestamp)) {
 +            long newTimestamp = topologyFile.lastModified();
 +            if(newTimestamp > origTimestamp) {
 +              break;
 +            } else {
 +              Thread.sleep(10);
 +              elapsed = System.currentTimeMillis() - start;
 +              continue;
 +            }
 +          } else {
 +            auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
 +                ActionOutcome.FAILURE);
 +            log.failedToRedeployTopology(topology.getName());
 +            break;
 +          }
 +        } catch (InterruptedException e) {
 +          auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
 +              ActionOutcome.FAILURE);
 +          log.failedToRedeployTopology(topology.getName(), e);
 +          e.printStackTrace();
 +        }
 +      }
 +    } catch (SAXException e) {
 +      auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToRedeployTopology(topology.getName(), e);
 +    }
 +  }
 +
 +  private List<TopologyEvent> createChangeEvents(
 +      Map<File, Topology> oldTopologies,
 +      Map<File, Topology> newTopologies) {
 +    ArrayList<TopologyEvent> events = new ArrayList<TopologyEvent>();
 +    // Go through the old topologies and find anything that was deleted.
 +    for (File file : oldTopologies.keySet()) {
 +      if (!newTopologies.containsKey(file)) {
 +        events.add(new TopologyEvent(TopologyEvent.Type.DELETED, oldTopologies.get(file)));
 +      }
 +    }
 +    // Go through the new topologies and figure out what was updated vs added.
 +    for (File file : newTopologies.keySet()) {
 +      if (oldTopologies.containsKey(file)) {
 +        Topology oldTopology = oldTopologies.get(file);
 +        Topology newTopology = newTopologies.get(file);
 +        if (newTopology.getTimestamp() > oldTopology.getTimestamp()) {
 +          events.add(new TopologyEvent(TopologyEvent.Type.UPDATED, newTopologies.get(file)));
 +        }
 +      } else {
 +        events.add(new TopologyEvent(TopologyEvent.Type.CREATED, newTopologies.get(file)));
 +      }
 +    }
 +    return events;
 +  }
 +
 +  private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
- 
-     File topoDir = new File(config.getGatewayTopologyDir());
++    String normalizedTopologyDir = FilenameUtils.normalize(config.getGatewayTopologyDir());
++    File topoDir = new File(normalizedTopologyDir);
 +    topoDir = topoDir.getAbsoluteFile();
 +    return topoDir;
 +  }
 +
-   private void initListener(FileAlterationMonitor monitor, File directory) {
-     this.directory = directory;
-     this.monitor = monitor;
++  private File calculateAbsoluteConfigDir(GatewayConfig config) {
++    File configDir = null;
 +
++    String path = FilenameUtils.normalize(config.getGatewayConfDir());
++    if (path != null) {
++      configDir = new File(path);
++    } else {
++      configDir = (new File(config.getGatewayTopologyDir())).getParentFile();
++    }
++    configDir = configDir.getAbsoluteFile();
 +
-     FileAlterationObserver observer = new FileAlterationObserver(this.directory, this);
-     observer.addListener(this);
-     monitor.addObserver(observer);
++    return configDir;
++  }
 +
-     this.listeners = new HashSet<>();
-     this.topologies = new HashMap<>(); //loadTopologies( this.directory );
++  private void  initListener(FileAlterationMonitor  monitor,
++                            File                   directory,
++                            FileFilter             filter,
++                            FileAlterationListener listener) {
++    monitors.add(monitor);
++    FileAlterationObserver observer = new FileAlterationObserver(directory, filter);
++    observer.addListener(listener);
++    monitor.addObserver(observer);
 +  }
 +
-   private void initListener(File directory) throws IOException, SAXException {
++  private void initListener(File directory, FileFilter filter, FileAlterationListener listener) throws IOException, SAXException {
 +    // Increasing the monitoring interval to 5 seconds as profiling has shown
 +    // this is rather expensive in terms of generated garbage objects.
-     initListener(new FileAlterationMonitor(5000L), directory);
++    initListener(new FileAlterationMonitor(5000L), directory, filter, listener);
 +  }
 +
 +  private Map<File, Topology> loadTopologies(File directory) {
 +    Map<File, Topology> map = new HashMap<>();
 +    if (directory.isDirectory() && directory.canRead()) {
 +      for (File file : directory.listFiles(this)) {
 +        try {
 +          Topology loadTopology = loadTopology(file);
 +          if (null != loadTopology) {
 +            map.put(file, loadTopology);
 +          } else {
 +            auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +              ActionOutcome.FAILURE);
 +            log.failedToLoadTopology(file.getAbsolutePath());
 +          }
 +        } catch (IOException e) {
 +          // Maybe it makes sense to throw exception
 +          auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +            ActionOutcome.FAILURE);
 +          log.failedToLoadTopology(file.getAbsolutePath(), e);
 +        } catch (SAXException e) {
 +          // Maybe it makes sense to throw exception
 +          auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +            ActionOutcome.FAILURE);
 +          log.failedToLoadTopology(file.getAbsolutePath(), e);
 +        } catch (Exception e) {
 +          // Maybe it makes sense to throw exception
 +          auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
 +            ActionOutcome.FAILURE);
 +          log.failedToLoadTopology(file.getAbsolutePath(), e);
 +        }
 +      }
 +    }
 +    return map;
 +  }
 +
++  public void setAliasService(AliasService as) {
++    this.aliasService = as;
++  }
++
 +  public void deployTopology(Topology t){
 +
 +    try {
-       File temp = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
++      File temp = new File(topologiesDirectory, t.getName() + ".xml.temp");
 +      Package topologyPkg = Topology.class.getPackage();
 +      String pkgName = topologyPkg.getName();
 +      String bindingFile = pkgName.replace(".", "/") + "/topology_binding-xml.xml";
 +
 +      Map<String, Object> properties = new HashMap<>(1);
 +      properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, bindingFile);
 +      JAXBContext jc = JAXBContext.newInstance(pkgName, Topology.class.getClassLoader(), properties);
 +      Marshaller mr = jc.createMarshaller();
 +
 +      mr.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
 +      mr.marshal(t, temp);
 +
-       File topology = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml");
++      File topology = new File(topologiesDirectory, t.getName() + ".xml");
 +      if(!temp.renameTo(topology)) {
 +        FileUtils.forceDelete(temp);
 +        throw new IOException("Could not rename temp file");
 +      }
 +
 +      // This code will check if the topology is valid, and retrieve the errors if it is not.
 +      TopologyValidator validator = new TopologyValidator( topology.getAbsolutePath() );
 +      if( !validator.validateTopology() ){
 +        throw new SAXException( validator.getErrorString() );
 +      }
 +
 +
 +    } catch (JAXBException e) {
 +      auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToDeployTopology(t.getName(), e);
 +    } catch (IOException io) {
 +      auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToDeployTopology(t.getName(), io);
 +    } catch (SAXException sx){
 +      auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +      log.failedToDeployTopology(t.getName(), sx);
 +    }
 +    reloadTopologies();
 +  }
 +
 +  public void redeployTopologies(String topologyName) {
 +
 +    for (Topology topology : getTopologies()) {
 +      if (topologyName == null || topologyName.equals(topology.getName())) {
 +        redeployTopology(topology);
 +      }
 +    }
 +
 +  }
 +
 +  public void reloadTopologies() {
 +    try {
 +      synchronized (this) {
 +        Map<File, Topology> oldTopologies = topologies;
-         Map<File, Topology> newTopologies = loadTopologies(directory);
++        Map<File, Topology> newTopologies = loadTopologies(topologiesDirectory);
 +        List<TopologyEvent> events = createChangeEvents(oldTopologies, newTopologies);
 +        topologies = newTopologies;
 +        notifyChangeListeners(events);
 +      }
 +    } catch (Exception e) {
 +      // Maybe it makes sense to throw exception
 +      log.failedToReloadTopologies(e);
 +    }
 +  }
 +
 +  public void deleteTopology(Topology t) {
-     File topoDir = directory;
++    File topoDir = topologiesDirectory;
 +
 +    if(topoDir.isDirectory() && topoDir.canRead()) {
 +      File[] results = topoDir.listFiles();
 +      for (File f : results) {
 +        String fName = FilenameUtils.getBaseName(f.getName());
 +        if(fName.equals(t.getName())) {
 +          f.delete();
 +        }
 +      }
 +    }
 +    reloadTopologies();
 +  }
 +
 +  private void notifyChangeListeners(List<TopologyEvent> events) {
 +    for (TopologyListener listener : listeners) {
 +      try {
 +        listener.handleTopologyEvent(events);
 +      } catch (RuntimeException e) {
 +        auditor.audit(Action.LOAD, "Topology_Event", ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
 +        log.failedToHandleTopologyEvents(e);
 +      }
 +    }
 +  }
 +
 +  public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
 +    File tFile = null;
 +    Map<String, List<String>> urls = new HashMap<>();
-     if(directory.isDirectory() && directory.canRead()) {
-       for(File f : directory.listFiles()){
++    if(topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
++      for(File f : topologiesDirectory.listFiles()){
 +        if(FilenameUtils.removeExtension(f.getName()).equals(t.getName())){
 +          tFile = f;
 +        }
 +      }
 +    }
 +    Set<ServiceDefinition> defs;
 +    if(tFile != null) {
 +      defs = ServiceDefinitionsLoader.getServiceDefinitions(new File(config.getGatewayServicesDir()));
 +
 +      for(ServiceDefinition def : defs) {
 +        urls.put(def.getRole(), def.getTestURLs());
 +      }
 +    }
 +    return urls;
 +  }
 +
 +  public Collection<Topology> getTopologies() {
 +    Map<File, Topology> map = topologies;
 +    return Collections.unmodifiableCollection(map.values());
 +  }
 +
 +  @Override
 +  public void addTopologyChangeListener(TopologyListener listener) {
 +    listeners.add(listener);
 +  }
 +
 +  @Override
 +  public void startMonitor() throws Exception {
-     monitor.start();
++    for (FileAlterationMonitor monitor : monitors) {
++      monitor.start();
++    }
 +  }
 +
 +  @Override
 +  public void stopMonitor() throws Exception {
-     monitor.stop();
++    for (FileAlterationMonitor monitor : monitors) {
++      monitor.stop();
++    }
 +  }
 +
 +  @Override
 +  public boolean accept(File file) {
 +    boolean accept = false;
 +    if (!file.isDirectory() && file.canRead()) {
 +      String extension = FilenameUtils.getExtension(file.getName());
 +      if (SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.contains(extension)) {
 +        accept = true;
 +      }
 +    }
 +    return accept;
 +  }
 +
 +  @Override
 +  public void onFileCreate(File file) {
 +    onFileChange(file);
 +  }
 +
 +  @Override
 +  public void onFileDelete(java.io.File file) {
++    // For full topology descriptors, we need to make sure to delete any corresponding simple descriptors to prevent
++    // unintended subsequent generation of the topology descriptor
++    for (String ext : DescriptorsMonitor.SUPPORTED_EXTENSIONS) {
++      File simpleDesc =
++              new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
++      if (simpleDesc.exists()) {
++        simpleDesc.delete();
++      }
++    }
++
 +    onFileChange(file);
 +  }
 +
 +  @Override
 +  public void onFileChange(File file) {
 +    reloadTopologies();
 +  }
 +
 +  @Override
 +  public void stop() {
 +
 +  }
 +
 +  @Override
 +  public void start() {
 +
 +  }
 +
 +  @Override
-   public void init(GatewayConfig config, Map<String, String> options) throws
-       ServiceLifecycleException {
++  public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
 +
 +    try {
-       initListener(calculateAbsoluteTopologiesDir(config));
-     } catch (IOException io) {
++      listeners = new HashSet<>();
++      topologies = new HashMap<>();
++
++      topologiesDirectory = calculateAbsoluteTopologiesDir(config);
++
++      File configDirectory = calculateAbsoluteConfigDir(config);
++      descriptorsDirectory = new File(configDirectory, "descriptors");
++      File sharedProvidersDirectory = new File(configDirectory, "shared-providers");
++
++      // Add support for conf/topologies
++      initListener(topologiesDirectory, this, this);
++
++      // Add support for conf/descriptors
++      DescriptorsMonitor dm = new DescriptorsMonitor(topologiesDirectory, aliasService);
++      initListener(descriptorsDirectory,
++                   dm,
++                   dm);
++
++      // Add support for conf/shared-providers
++      SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
++      initListener(sharedProvidersDirectory, spm, spm);
++
++    } catch (IOException | SAXException io) {
 +      throw new ServiceLifecycleException(io.getMessage());
-     } catch (SAXException sax) {
-       throw new ServiceLifecycleException(sax.getMessage());
++    }
++  }
++
++
++  /**
++   * Change handler for simple descriptors
++   */
++  public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
++                                          implements FileFilter {
++
++    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<String>();
++    static {
++      SUPPORTED_EXTENSIONS.add("json");
++      SUPPORTED_EXTENSIONS.add("yml");
++    }
++
++    private File topologiesDir;
++
++    private AliasService aliasService;
++
++    private Map<String, List<String>> providerConfigReferences = new HashMap<>();
++
++
++    public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
++      this.topologiesDir  = topologiesDir;
++      this.aliasService   = aliasService;
++    }
++
++    List<String> getReferencingDescriptors(String providerConfigPath) {
++      List<String> result = providerConfigReferences.get(providerConfigPath);
++      if (result == null) {
++        result = Collections.emptyList();
++      }
++      return result;
++    }
++
++    @Override
++    public void onFileCreate(File file) {
++      onFileChange(file);
++    }
++
++    @Override
++    public void onFileDelete(File file) {
++      // For simple descriptors, we need to make sure to delete any corresponding full topology descriptors to trigger undeployment
++      for (String ext : DefaultTopologyService.SUPPORTED_TOPOLOGY_FILE_EXTENSIONS) {
++        File topologyFile =
++                new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
++        if (topologyFile.exists()) {
++          topologyFile.delete();
++        }
++      }
++
++      String normalizedFilePath = FilenameUtils.normalize(file.getAbsolutePath());
++      String reference = null;
++      for (Map.Entry<String, List<String>> entry : providerConfigReferences.entrySet()) {
++        if (entry.getValue().contains(normalizedFilePath)) {
++          reference = entry.getKey();
++          break;
++        }
++      }
++      if (reference != null) {
++        providerConfigReferences.get(reference).remove(normalizedFilePath);
++      }
++    }
++
++    @Override
++    public void onFileChange(File file) {
++      try {
++        // When a simple descriptor has been created or modified, generate the new topology descriptor
++        Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
++
++        // Add the provider config reference relationship for handling updates to the provider config
++        String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
++        if (!providerConfigReferences.containsKey(providerConfig)) {
++          providerConfigReferences.put(providerConfig, new ArrayList<String>());
++        }
++        List<String> refs = providerConfigReferences.get(providerConfig);
++        String descriptorName = FilenameUtils.normalize(file.getAbsolutePath());
++        if (!refs.contains(descriptorName)) {
++          // Need to check if descriptor had previously referenced another provider config, so it can be removed
++          for (List<String> descs : providerConfigReferences.values()) {
++            if (descs.contains(descriptorName)) {
++              descs.remove(descriptorName);
++            }
++          }
++
++          // Add the current reference relationship
++          refs.add(descriptorName);
++        }
++      } catch (Exception e) {
++        log.simpleDescriptorHandlingError(file.getName(), e);
++      }
++    }
++
++    @Override
++    public boolean accept(File file) {
++      boolean accept = false;
++      if (!file.isDirectory() && file.canRead()) {
++        String extension = FilenameUtils.getExtension(file.getName());
++        if (SUPPORTED_EXTENSIONS.contains(extension)) {
++          accept = true;
++        }
++      }
++      return accept;
++    }
++  }
++
++  /**
++   * Change handler for shared provider configurations
++   */
++  public static class SharedProviderConfigMonitor extends FileAlterationListenerAdaptor
++          implements FileFilter {
++
++    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
++    static {
++      SUPPORTED_EXTENSIONS.add("xml");
 +    }
 +
++    private DescriptorsMonitor descriptorsMonitor;
++    private File descriptorsDir;
++
++
++    SharedProviderConfigMonitor(DescriptorsMonitor descMonitor, File descriptorsDir) {
++      this.descriptorsMonitor = descMonitor;
++      this.descriptorsDir     = descriptorsDir;
++    }
++
++    @Override
++    public void onFileCreate(File file) {
++      onFileChange(file);
++    }
++
++    @Override
++    public void onFileDelete(File file) {
++      onFileChange(file);
++    }
++
++    @Override
++    public void onFileChange(File file) {
++      // For shared provider configuration, we need to update any simple descriptors that reference it
++      for (File descriptor : getReferencingDescriptors(file)) {
++        descriptor.setLastModified(System.currentTimeMillis());
++      }
++    }
++
++    private List<File> getReferencingDescriptors(File sharedProviderConfig) {
++      List<File> references = new ArrayList<>();
++
++      for (File descriptor : descriptorsDir.listFiles()) {
++        if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
++          for (String reference : descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()))) {
++            references.add(new File(reference));
++          }
++        }
++      }
++
++      return references;
++    }
++
++    @Override
++    public boolean accept(File file) {
++      boolean accept = false;
++      if (!file.isDirectory() && file.canRead()) {
++        String extension = FilenameUtils.getExtension(file.getName());
++        if (SUPPORTED_EXTENSIONS.contains(extension)) {
++          accept = true;
++        }
++      }
++      return accept;
++    }
 +  }
++
 +}
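
Note: all three watched directories (topologies, descriptors, shared-providers) share the commons-io
monitoring pattern that initListener() encapsulates. A condensed sketch of that wiring (watchedDir,
fileFilter and changeListener are placeholders for the directory/filter/listener triples registered above):

    FileAlterationMonitor monitor = new FileAlterationMonitor(5000L);  // 5-second polling interval
    FileAlterationObserver observer = new FileAlterationObserver(watchedDir, fileFilter);
    observer.addListener(changeListener);  // onFileCreate/onFileChange/onFileDelete callbacks
    monitor.addObserver(observer);
    monitor.start();  // invoked for each monitor in startMonitor()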

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index ee64d15,0000000..1caa946
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@@ -1,94 -1,0 +1,94 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.builder;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.apache.knox.gateway.topology.Application;
 +import org.apache.knox.gateway.topology.Provider;
 +import org.apache.knox.gateway.topology.Service;
 +import org.apache.knox.gateway.topology.Topology;
 +
 +public class BeanPropertyTopologyBuilder implements TopologyBuilder {
 +
 +    private String name;
 +    private List<Provider> providers;
 +    private List<Service> services;
 +    private List<Application> applications;
 +
 +    public BeanPropertyTopologyBuilder() {
 +        providers = new ArrayList<Provider>();
 +        services = new ArrayList<Service>();
 +        applications = new ArrayList<Application>();
 +    }
 +
 +    public BeanPropertyTopologyBuilder name(String name) {
 +        this.name = name;
 +        return this;
 +    }
 +
 +    public String name() {
 +        return name;
 +    }
 +
 +    public BeanPropertyTopologyBuilder addProvider(Provider provider) {
 +        providers.add(provider);
 +        return this;
 +    }
 +
 +    public List<Provider> providers() {
 +        return providers;
 +    }
 +
 +    public BeanPropertyTopologyBuilder addService(Service service) {
 +        services.add(service);
 +        return this;
 +    }
 +
 +    public List<Service> services() {
 +        return services;
 +    }
 +
 +    public BeanPropertyTopologyBuilder addApplication( Application application ) {
 +        applications.add(application);
 +        return this;
 +    }
 +
 +    public List<Application> applications() {
 +        return applications;
 +    }
 +
 +    public Topology build() {
 +        Topology topology = new Topology();
 +        topology.setName(name);
 +
-           for (Provider provider : providers) {
++        for (Provider provider : providers) {
 +            topology.addProvider(provider);
 +        }
 +
 +        for (Service service : services) {
 +            topology.addService(service);
 +        }
 +
 +        for (Application application : applications) {
 +            topology.addApplication(application);
 +        }
 +
 +        return topology;
 +    }
 +}
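
Note: the builder accumulates providers, services and applications, then copies them into a Topology on
build(). A minimal usage sketch (the provider and service instances here are hypothetical):

    Topology topology = new BeanPropertyTopologyBuilder()
        .name("sandbox")
        .addProvider(authenticationProvider)
        .addService(webhdfsService)
        .build();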

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
index 1b52625,0000000..1e31151
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
@@@ -1,215 -1,0 +1,266 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.topology;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.commons.io.monitor.FileAlterationMonitor;
 +import org.apache.commons.io.monitor.FileAlterationObserver;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.knox.gateway.topology.Param;
 +import org.apache.knox.gateway.topology.Provider;
 +import org.apache.knox.gateway.topology.Topology;
 +import org.apache.knox.gateway.topology.TopologyEvent;
 +import org.apache.knox.gateway.topology.TopologyListener;
++import org.apache.knox.gateway.services.security.AliasService;
 +import org.easymock.EasyMock;
 +import org.junit.After;
 +import org.junit.Before;
 +import org.junit.Test;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.io.OutputStream;
 +import java.util.*;
 +
++import static org.easymock.EasyMock.anyObject;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.Matchers.hasItem;
 +import static org.hamcrest.core.IsNull.notNullValue;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.assertTrue;
 +
 +public class DefaultTopologyServiceTest {
 +
 +  @Before
 +  public void setUp() throws Exception {
 +  }
 +
 +  @After
 +  public void tearDown() throws Exception {
 +  }
 +
 +  private File createDir() throws IOException {
 +    return TestUtils.createTempDir(this.getClass().getSimpleName() + "-");
 +  }
 +
 +  private File createFile(File parent, String name, String resource, long timestamp) throws IOException {
 +    File file = new File(parent, name);
 +    if (!file.exists()) {
 +      FileUtils.touch(file);
 +    }
 +    InputStream input = ClassLoader.getSystemResourceAsStream(resource);
 +    OutputStream output = FileUtils.openOutputStream(file);
 +    IOUtils.copy(input, output);
 +    //KNOX-685: output.flush();
 +    input.close();
 +    output.close();
 +    file.setLastModified(timestamp);
 +    assertTrue("Failed to create test file " + file.getAbsolutePath(), file.exists());
 +    assertTrue("Failed to populate test file " + file.getAbsolutePath(), file.length() > 0);
 +
 +    return file;
 +  }
 +
 +  @Test
 +  public void testGetTopologies() throws Exception {
 +
 +    File dir = createDir();
-     long time = dir.lastModified();
++    File topologyDir = new File(dir, "topologies");
++
++    File descriptorsDir = new File(dir, "descriptors");
++    descriptorsDir.mkdirs();
++
++    File sharedProvidersDir = new File(dir, "shared-providers");
++    sharedProvidersDir.mkdirs();
++
++    long time = topologyDir.lastModified();
 +    try {
-       createFile(dir, "one.xml",
-           "org/apache/knox/gateway/topology/file/topology-one.xml", time);
++      createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
 +
 +      TestTopologyListener topoListener = new TestTopologyListener();
 +      FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
 +
 +      TopologyService provider = new DefaultTopologyService();
 +      Map<String, String> c = new HashMap<>();
 +
 +      GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-       EasyMock.expect(config.getGatewayTopologyDir()).andReturn(dir.toString()).anyTimes();
++      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
++      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
 +      EasyMock.replay(config);
 +
 +      provider.init(config, c);
 +
- 
 +      provider.addTopologyChangeListener(topoListener);
 +
 +      provider.reloadTopologies();
 +
- 
 +      Collection<Topology> topologies = provider.getTopologies();
 +      assertThat(topologies, notNullValue());
 +      assertThat(topologies.size(), is(1));
 +      Topology topology = topologies.iterator().next();
 +      assertThat(topology.getName(), is("one"));
 +      assertThat(topology.getTimestamp(), is(time));
 +      assertThat(topoListener.events.size(), is(1));
 +      topoListener.events.clear();
 +
 +      // Add a file to the directory.
 +      File two = createFile(topologyDir, "two.xml",
 +          "org/apache/knox/gateway/topology/file/topology-two.xml", 1L);
 +      provider.reloadTopologies();
 +      topologies = provider.getTopologies();
 +      assertThat(topologies.size(), is(2));
 +      Set<String> names = new HashSet<>(Arrays.asList("one", "two"));
 +      Iterator<Topology> iterator = topologies.iterator();
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      assertThat(names.size(), is(0));
 +      assertThat(topoListener.events.size(), is(1));
 +      List<TopologyEvent> events = topoListener.events.get(0);
 +      assertThat(events.size(), is(1));
 +      TopologyEvent event = events.get(0);
 +      assertThat(event.getType(), is(TopologyEvent.Type.CREATED));
 +      assertThat(event.getTopology(), notNullValue());
 +
 +      // Update a file in the directory.
 +      two = createFile(topologyDir, "two.xml",
 +          "org/apache/knox/gateway/topology/file/topology-three.xml", 2L);
 +      provider.reloadTopologies();
 +      topologies = provider.getTopologies();
 +      assertThat(topologies.size(), is(2));
 +      names = new HashSet<>(Arrays.asList("one", "two"));
 +      iterator = topologies.iterator();
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      topology = iterator.next();
 +      assertThat(names, hasItem(topology.getName()));
 +      names.remove(topology.getName());
 +      assertThat(names.size(), is(0));
 +
 +      // Remove a file from the directory.
 +      two.delete();
 +      provider.reloadTopologies();
 +      topologies = provider.getTopologies();
 +      assertThat(topologies.size(), is(1));
 +      topology = topologies.iterator().next();
 +      assertThat(topology.getName(), is("one"));
 +      assertThat(topology.getTimestamp(), is(time));
++
++      // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
++      // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
++      //         org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
++      AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
++      EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
++      EasyMock.replay(aliasService);
++      DefaultTopologyService.DescriptorsMonitor dm =
++                                          new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
++
++      // Write out the referenced provider config first
++      File provCfgFile = createFile(sharedProvidersDir,
++                                    "ambari-cluster-policy.xml",
++                                    "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
++                                    1L);
++      try {
++        // Create the simple descriptor in the descriptors dir
++        File simpleDesc =
++                createFile(descriptorsDir,
++                           "four.json",
++                           "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
++                           1L);
++
++        // Trigger the topology generation by noticing the simple descriptor
++        dm.onFileChange(simpleDesc);
++
++        // Load the generated topology
++        provider.reloadTopologies();
++        topologies = provider.getTopologies();
++        assertThat(topologies.size(), is(2));
++        names = new HashSet<>(Arrays.asList("one", "four"));
++        iterator = topologies.iterator();
++        topology = iterator.next();
++        assertThat(names, hasItem(topology.getName()));
++        names.remove(topology.getName());
++        topology = iterator.next();
++        assertThat(names, hasItem(topology.getName()));
++        names.remove(topology.getName());
++        assertThat(names.size(), is(0));
++      } finally {
++        provCfgFile.delete();
++
++      }
 +    } finally {
 +      FileUtils.deleteQuietly(dir);
 +    }
 +  }
 +
 +  private void kickMonitor(FileAlterationMonitor monitor) {
 +    for (FileAlterationObserver observer : monitor.getObservers()) {
 +      observer.checkAndNotify();
 +    }
 +  }
 +
 +  @Test
 +  public void testProviderParamsOrderIsPreserved() {
 +
 +    Provider provider = new Provider();
 +    String names[] = {"ldapRealm=",
 +        "ldapContextFactory",
 +        "ldapRealm.contextFactory",
 +        "ldapGroupRealm",
 +        "ldapGroupRealm.contextFactory",
 +        "ldapGroupRealm.contextFactory.systemAuthenticationMechanism"
 +    };
 +
 +    Param param = null;
 +    for (String name : names) {
 +      param = new Param();
 +      param.setName(name);
 +      param.setValue(name);
 +      provider.addParam(param);
 +
 +    }
 +    Map<String, String> params = provider.getParams();
 +    Set<String> keySet = params.keySet();
 +    Iterator<String> iter = keySet.iterator();
 +    int i = 0;
 +    while (iter.hasNext()) {
 +      assertTrue(iter.next().equals(names[i++]));
 +    }
 +
 +  }
 +
 +  private class TestTopologyListener implements TopologyListener {
 +
 +    public ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
 +
 +    @Override
 +    public void handleTopologyEvent(List<TopologyEvent> events) {
 +      this.events.add(events);
 +    }
 +
 +  }
 +
 +}
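
A note on the descriptor-driven portion of the test above: a "simple
descriptor" such as the four.json consumed by the DescriptorsMonitor names a
discovery source, a provider configuration reference, and the services to
expose. A minimal sketch of its shape, written as a Java string constant in
the same style as the JSON templates used by the discovery tests later in
this series (the field values here are illustrative, not the actual content
of the test resource):

    private static final String SIMPLE_DESCRIPTOR_JSON =
        "{\n" +
        "  \"discovery-type\": \"AMBARI\",\n" +
        "  \"discovery-address\": \"http://c6401.ambari.apache.org:8080\",\n" +
        "  \"provider-config-ref\": \"ambari-cluster-policy\",\n" +
        "  \"cluster\": \"Sandbox\",\n" +
        "  \"services\": [ { \"name\": \"NAMENODE\" }, { \"name\": \"WEBHDFS\" } ]\n" +
        "}";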

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
index 864440c,0000000..6f0a805
mode 100644,000000..100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
@@@ -1,352 -1,0 +1,352 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.service.knoxsso;
 +
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertTrue;
 +
 +import java.security.KeyPair;
 +import java.security.KeyPairGenerator;
 +import java.security.NoSuchAlgorithmException;
 +import java.security.Principal;
 +import java.security.interfaces.RSAPrivateKey;
 +import java.security.interfaces.RSAPublicKey;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +import javax.security.auth.Subject;
 +import javax.servlet.ServletContext;
 +import javax.servlet.ServletOutputStream;
 +import javax.servlet.http.Cookie;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +import javax.servlet.http.HttpServletResponseWrapper;
 +
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
 +import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.services.security.token.impl.JWTToken;
 +import org.apache.knox.gateway.util.RegExUtils;
 +import org.easymock.EasyMock;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +
 +import com.nimbusds.jose.JWSSigner;
 +import com.nimbusds.jose.JWSVerifier;
 +import com.nimbusds.jose.crypto.RSASSASigner;
 +import com.nimbusds.jose.crypto.RSASSAVerifier;
 +
 +/**
 + * Some tests for the Knox SSO service.
 + */
 +public class WebSSOResourceTest {
 +
 +  protected static RSAPublicKey publicKey;
 +  protected static RSAPrivateKey privateKey;
 +
 +  @BeforeClass
 +  public static void setup() throws Exception {
 +    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
 +    kpg.initialize(1024);
 +    KeyPair keyPair = kpg.generateKeyPair();
 +
 +    publicKey = (RSAPublicKey) keyPair.getPublic();
 +    privateKey = (RSAPrivateKey) keyPair.getPrivate();
 +  }
 +
 +  @Test
 +  public void testWhitelistMatching() throws Exception {
 +    String whitelist = "^https?://.*example.com:8080/.*$;" +
 +        "^https?://.*example.com/.*$;" +
 +        "^https?://.*example2.com:\\d{0,9}/.*$;" +
 +        "^https://.*example3.com:\\d{0,9}/.*$;" +
 +        "^https?://localhost:\\d{0,9}/.*$;^/.*$";
 +
 +    // match on explicit hostname/domain and port
 +    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example.com:8080/"));
 +    // match on non-required port
 +    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example.com/"));
 +    // match on required but any port
 +    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example2.com:1234/"));
 +    // fail on missing port
 +    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example2.com/"));
 +    // fail on invalid port
 +    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example.com:8081/"));
 +    // fail on alphanumeric port
 +    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example.com:A080/"));
 +    // fail on invalid hostname/domain
 +    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example.net:8080/"));
 +    // fail on required port
 +    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example2.com/"));
 +    // fail on required https
 +    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
 +        "http://host.example3.com/"));
 +    // match on localhost and port
 +    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
 +        "http://localhost:8080/"));
 +    // match on local/relative path
 +    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
 +        "/local/resource/"));
 +  }
 +
 +  @Test
 +  public void testGetToken() throws Exception {
 +
 +    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
 +
 +    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +    EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
 +    EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
 +    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
 +
 +    Principal principal = EasyMock.createNiceMock(Principal.class);
 +    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
 +    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
 +
 +    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
 +    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
 +
 +    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
 +    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
 +
 +    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +    ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
 +    CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
 +
 +    EasyMock.replay(principal, services, context, request);
 +
 +    WebSSOResource webSSOResponse = new WebSSOResource();
 +    webSSOResponse.request = request;
 +    webSSOResponse.response = responseWrapper;
 +    webSSOResponse.context = context;
 +    webSSOResponse.init();
 +
 +    // Issue a token
 +    webSSOResponse.doGet();
 +
 +    // Check the cookie
 +    Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
 +    assertNotNull(cookie);
 +
 +    JWTToken parsedToken = new JWTToken(cookie.getValue());
 +    assertEquals("alice", parsedToken.getSubject());
 +    assertTrue(authority.verifyToken(parsedToken));
 +  }
 +
 +  @Test
 +  public void testAudiences() throws Exception {
 +
 +    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn("recipient1,recipient2");
 +    EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
 +
 +    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +    EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
 +    EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
 +    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
 +
 +    Principal principal = EasyMock.createNiceMock(Principal.class);
 +    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
 +    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
 +
 +    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
 +    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
 +
 +    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
 +    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
 +
 +    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +    ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
 +    CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
 +
 +    EasyMock.replay(principal, services, context, request);
 +
 +    WebSSOResource webSSOResponse = new WebSSOResource();
 +    webSSOResponse.request = request;
 +    webSSOResponse.response = responseWrapper;
 +    webSSOResponse.context = context;
 +    webSSOResponse.init();
 +
 +    // Issue a token
 +    webSSOResponse.doGet();
 +
 +    // Check the cookie
 +    Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
 +    assertNotNull(cookie);
 +
 +    JWTToken parsedToken = new JWTToken(cookie.getValue());
 +    assertEquals("alice", parsedToken.getSubject());
 +    assertTrue(authority.verifyToken(parsedToken));
 +
 +    // Verify the audiences
 +    List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
 +    assertEquals(2, audiences.size());
 +    assertTrue(audiences.contains("recipient1"));
 +    assertTrue(audiences.contains("recipient2"));
 +  }
 +
 +  /**
 +   * A wrapper for HttpServletResponseWrapper to store the cookies
 +   */
 +  private static class CookieResponseWrapper extends HttpServletResponseWrapper {
 +
 +    private ServletOutputStream outputStream;
 +    private Map<String, Cookie> cookies = new HashMap<>();
 +
 +    public CookieResponseWrapper(HttpServletResponse response) {
 +        super(response);
 +    }
 +
 +    public CookieResponseWrapper(HttpServletResponse response, ServletOutputStream outputStream) {
 +        super(response);
 +        this.outputStream = outputStream;
 +    }
 +
 +    @Override
 +    public ServletOutputStream getOutputStream() {
 +        return outputStream;
 +    }
 +
 +    @Override
 +    public void addCookie(Cookie cookie) {
 +        super.addCookie(cookie);
 +        cookies.put(cookie.getName(), cookie);
 +    }
 +
 +    public Cookie getCookie(String name) {
 +        return cookies.get(name);
 +    }
 +
 +  }
 +
 +  private static class TestJWTokenAuthority implements JWTokenAuthority {
 +
 +    private RSAPublicKey publicKey;
 +    private RSAPrivateKey privateKey;
 +
 +    public TestJWTokenAuthority(RSAPublicKey publicKey, RSAPrivateKey privateKey) {
 +      this.publicKey = publicKey;
 +      this.privateKey = privateKey;
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Subject subject, String algorithm)
++    public JWT issueToken(Subject subject, String algorithm)
 +      throws TokenServiceException {
 +      Principal p = (Principal) subject.getPrincipals().toArray()[0];
 +      return issueToken(p, algorithm);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, String algorithm)
++    public JWT issueToken(Principal p, String algorithm)
 +      throws TokenServiceException {
 +      return issueToken(p, null, algorithm);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, String audience, String algorithm)
++    public JWT issueToken(Principal p, String audience, String algorithm)
 +      throws TokenServiceException {
 +      return issueToken(p, audience, algorithm, -1);
 +    }
 +
 +    @Override
-     public boolean verifyToken(JWTToken token) throws TokenServiceException {
++    public boolean verifyToken(JWT token) throws TokenServiceException {
 +      JWSVerifier verifier = new RSASSAVerifier(publicKey);
 +      return token.verify(verifier);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, String audience, String algorithm,
++    public JWT issueToken(Principal p, String audience, String algorithm,
 +                               long expires) throws TokenServiceException {
 +      List<String> audiences = null;
 +      if (audience != null) {
 +        audiences = new ArrayList<String>();
 +        audiences.add(audience);
 +      }
 +      return issueToken(p, audiences, algorithm, expires);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
++    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
 +                               long expires) throws TokenServiceException {
 +      String[] claimArray = new String[4];
 +      claimArray[0] = "KNOXSSO";
 +      claimArray[1] = p.getName();
 +      claimArray[2] = null;
 +      if (expires == -1) {
 +        claimArray[3] = null;
 +      } else {
 +        claimArray[3] = String.valueOf(expires);
 +      }
 +
 +      JWTToken token = null;
 +      if ("RS256".equals(algorithm)) {
 +        token = new JWTToken("RS256", claimArray, audiences);
 +        JWSSigner signer = new RSASSASigner(privateKey);
 +        token.sign(signer);
 +      } else {
 +        throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
 +      }
 +
 +      return token;
 +    }
 +
 +    @Override
 +    public JWT issueToken(Principal p, String algorithm, long expiry)
 +        throws TokenServiceException {
 +      return issueToken(p, Collections.<String>emptyList(), algorithm, expiry);
 +    }
 +
 +    @Override
-     public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
++    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
 +      JWSVerifier verifier = new RSASSAVerifier(publicKey);
 +      return token.verify(verifier);
 +    }
 +
 +  }
 +
 +}
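
The tests above check the issued hadoop-jwt cookie through the test
authority. A standalone check of such a cookie value can be written with the
same Nimbus primitives that TestJWTokenAuthority uses; a minimal sketch,
assuming the caller already holds the gateway's RSA public key (the class
and method names are hypothetical, and this validates the signature only,
not expiration):

    import java.security.interfaces.RSAPublicKey;
    import com.nimbusds.jose.JWSVerifier;
    import com.nimbusds.jose.crypto.RSASSAVerifier;
    import com.nimbusds.jwt.SignedJWT;

    final class KnoxSsoCookieCheck {
      // Returns true if the compact-serialized token carries a valid RSA signature.
      static boolean hasValidSignature(String cookieValue, RSAPublicKey key) throws Exception {
        SignedJWT jwt = SignedJWT.parse(cookieValue);    // parse "header.payload.signature"
        JWSVerifier verifier = new RSASSAVerifier(key);  // same verifier type as the test
        return jwt.verify(verifier);                     // signature check only; no exp check
      }
    }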

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
index dc76005,0000000..224eb1c
mode 100644,000000..100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@@ -1,307 -1,0 +1,307 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.service.knoxtoken;
 +
 +import org.apache.knox.gateway.service.knoxtoken.TokenResource;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
 +import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.services.security.token.impl.JWTToken;
 +import org.easymock.EasyMock;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +
 +import com.nimbusds.jose.JWSSigner;
 +import com.nimbusds.jose.JWSVerifier;
 +import com.nimbusds.jose.crypto.RSASSASigner;
 +import com.nimbusds.jose.crypto.RSASSAVerifier;
 +
 +import java.util.Map;
 +
 +import javax.security.auth.Subject;
 +import javax.servlet.ServletContext;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +import javax.ws.rs.core.Response;
 +
 +import static org.junit.Assert.*;
 +
 +import java.io.PrintWriter;
 +import java.io.StringWriter;
 +import java.security.KeyPair;
 +import java.security.KeyPairGenerator;
 +import java.security.NoSuchAlgorithmException;
 +import java.security.Principal;
 +import java.security.interfaces.RSAPrivateKey;
 +import java.security.interfaces.RSAPublicKey;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +
 +/**
 + * Some tests for the token service
 + */
 +public class TokenServiceResourceTest {
 +
 +  protected static RSAPublicKey publicKey;
 +  protected static RSAPrivateKey privateKey;
 +
 +  @BeforeClass
 +  public static void setup() throws Exception {
 +    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
 +    kpg.initialize(1024);
 +    KeyPair keyPair = kpg.generateKeyPair();
 +
 +    publicKey = (RSAPublicKey) keyPair.getPublic();
 +    privateKey = (RSAPrivateKey) keyPair.getPrivate();
 +  }
 +
 +  @Test
 +  public void testTokenService() throws Exception {
 +    Assert.assertTrue(true);
 +  }
 +
 +  @Test
 +  public void testClientData() throws Exception {
 +    TokenResource tr = new TokenResource();
 +
 +    Map<String,Object> clientDataMap = new HashMap<>();
 +    tr.addClientDataToMap("cookie.name=hadoop-jwt,test=value".split(","), clientDataMap);
 +    Assert.assertTrue(clientDataMap.size() == 2);
 +
 +    clientDataMap = new HashMap<>();
 +    tr.addClientDataToMap("cookie.name=hadoop-jwt".split(","), clientDataMap);
 +    Assert.assertTrue(clientDataMap.size() == 1);
 +
 +    clientDataMap = new HashMap<>();
 +    tr.addClientDataToMap("".split(","), clientDataMap);
 +    Assert.assertTrue(clientDataMap.size() == 0);
 +  }
 +
 +  @Test
 +  public void testGetToken() throws Exception {
 +    TokenResource tr = new TokenResource();
 +
 +    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
 +    //tr.context = context;
 +    // tr.init();
 +
 +    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
 +    Principal principal = EasyMock.createNiceMock(Principal.class);
 +    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
 +    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
 +
 +    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
 +    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
 +
 +    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
 +    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
 +
 +    StringWriter writer = new StringWriter();
 +    PrintWriter printWriter = new PrintWriter(writer);
 +    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +    EasyMock.expect(response.getWriter()).andReturn(printWriter);
 +
 +    EasyMock.replay(principal, services, context, request, response);
 +
 +    tr.request = request;
 +    tr.response = response;
 +
 +    // Issue a token
 +    Response retResponse = tr.doGet();
 +
 +    assertEquals(200, retResponse.getStatus());
 +
 +    // Parse the response
 +    String retString = writer.toString();
 +    String accessToken = getTagValue(retString, "access_token");
 +    assertNotNull(accessToken);
 +    String expiry = getTagValue(retString, "expires_in");
 +    assertNotNull(expiry);
 +
 +    // Verify the token
 +    JWTToken parsedToken = new JWTToken(accessToken);
 +    assertEquals("alice", parsedToken.getSubject());
 +    assertTrue(authority.verifyToken(parsedToken));
 +  }
 +
 +  @Test
 +  public void testAudiences() throws Exception {
 +
 +    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
 +    EasyMock.expect(context.getInitParameter("knox.token.audiences")).andReturn("recipient1,recipient2");
 +    EasyMock.expect(context.getInitParameter("knox.token.ttl")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knox.token.target.url")).andReturn(null);
 +    EasyMock.expect(context.getInitParameter("knox.token.client.data")).andReturn(null);
 +
 +    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
 +    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
 +    Principal principal = EasyMock.createNiceMock(Principal.class);
 +    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
 +    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
 +
 +    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
 +    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
 +
 +    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
 +    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
 +
 +    StringWriter writer = new StringWriter();
 +    PrintWriter printWriter = new PrintWriter(writer);
 +    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
 +    EasyMock.expect(response.getWriter()).andReturn(printWriter);
 +
 +    EasyMock.replay(principal, services, context, request, response);
 +
 +    TokenResource tr = new TokenResource();
 +    tr.request = request;
 +    tr.response = response;
 +    tr.context = context;
 +    tr.init();
 +
 +    // Issue a token
 +    Response retResponse = tr.doGet();
 +
 +    assertEquals(200, retResponse.getStatus());
 +
 +    // Parse the response
 +    String retString = writer.toString();
 +    String accessToken = getTagValue(retString, "access_token");
 +    assertNotNull(accessToken);
 +    String expiry = getTagValue(retString, "expires_in");
 +    assertNotNull(expiry);
 +
 +    // Verify the token
 +    JWTToken parsedToken = new JWTToken(accessToken);
 +    assertEquals("alice", parsedToken.getSubject());
 +    assertTrue(authority.verifyToken(parsedToken));
 +
 +    // Verify the audiences
 +    List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
 +    assertEquals(2, audiences.size());
 +    assertTrue(audiences.contains("recipient1"));
 +    assertTrue(audiences.contains("recipient2"));
 +  }
 +
 +  private String getTagValue(String token, String tagName) {
 +    String searchString = tagName + "\":";
 +    String value = token.substring(token.indexOf(searchString) + searchString.length());
 +    if (value.startsWith("\"")) {
 +      value = value.substring(1);
 +    }
 +    if (value.contains("\"")) {
 +      return value.substring(0, value.indexOf("\""));
 +    } else if (value.contains(",")) {
 +      return value.substring(0, value.indexOf(","));
 +    } else {
 +      return value.substring(0, value.length() - 1);
 +    }
 +  }
 +
 +  private static class TestJWTokenAuthority implements JWTokenAuthority {
 +
 +    private RSAPublicKey publicKey;
 +    private RSAPrivateKey privateKey;
 +
 +    public TestJWTokenAuthority(RSAPublicKey publicKey, RSAPrivateKey privateKey) {
 +      this.publicKey = publicKey;
 +      this.privateKey = privateKey;
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Subject subject, String algorithm)
++    public JWT issueToken(Subject subject, String algorithm)
 +      throws TokenServiceException {
 +      Principal p = (Principal) subject.getPrincipals().toArray()[0];
 +      return issueToken(p, algorithm);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, String algorithm)
++    public JWT issueToken(Principal p, String algorithm)
 +      throws TokenServiceException {
 +      return issueToken(p, null, algorithm);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, String audience, String algorithm)
++    public JWT issueToken(Principal p, String audience, String algorithm)
 +      throws TokenServiceException {
 +      return issueToken(p, audience, algorithm, -1);
 +    }
 +
 +    @Override
-     public boolean verifyToken(JWTToken token) throws TokenServiceException {
++    public boolean verifyToken(JWT token) throws TokenServiceException {
 +      JWSVerifier verifier = new RSASSAVerifier(publicKey);
 +      return token.verify(verifier);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, String audience, String algorithm,
++    public JWT issueToken(Principal p, String audience, String algorithm,
 +                               long expires) throws TokenServiceException {
 +      ArrayList<String> audiences = null;
 +      if (audience != null) {
 +        audiences = new ArrayList<String>();
 +        audiences.add(audience);
 +      }
 +      return issueToken(p, audiences, algorithm, expires);
 +    }
 +
 +    @Override
-     public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
++    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
 +                               long expires) throws TokenServiceException {
 +      String[] claimArray = new String[4];
 +      claimArray[0] = "KNOXSSO";
 +      claimArray[1] = p.getName();
 +      claimArray[2] = null;
 +      if (expires == -1) {
 +        claimArray[3] = null;
 +      } else {
 +        claimArray[3] = String.valueOf(expires);
 +      }
 +
 +      JWTToken token = null;
 +      if ("RS256".equals(algorithm)) {
 +        token = new JWTToken("RS256", claimArray, audiences);
 +        JWSSigner signer = new RSASSASigner(privateKey);
 +        token.sign(signer);
 +      } else {
 +        throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
 +      }
 +
 +      return token;
 +    }
 +
 +    @Override
 +    public JWT issueToken(Principal p, String algorithm, long expiry)
 +        throws TokenServiceException {
 +      return issueToken(p, Collections.<String>emptyList(), algorithm, expiry);
 +    }
 +
 +    @Override
-     public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
++    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
 +      JWSVerifier verifier = new RSASSAVerifier(publicKey);
 +      return token.verify(verifier);
 +    }
 +
 +  }
 +
 +
 +}
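
The getTagValue() helper above extracts members from the token response by
slicing the raw string. Assuming json-smart (net.minidev.json) is available
on this module's test classpath, as it is for the discovery tests, a more
robust alternative is to parse the response as JSON; a sketch (the helper
name is hypothetical, and it assumes the response is a flat JSON object with
"access_token" and "expires_in" members, as the assertions expect):

    import net.minidev.json.JSONObject;
    import net.minidev.json.JSONValue;

    // Parse the response once and read members by name instead of via indexOf().
    private static String getJsonMember(String response, String name) {
      JSONObject json = (JSONObject) JSONValue.parse(response);  // lenient JSON parse
      Object value = json.get(name);
      return (value == null) ? null : value.toString();
    }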

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
index f01bd20,0000000..0ed7556
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
@@@ -1,300 -1,0 +1,302 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.config;
 +
 +import java.net.InetSocketAddress;
 +import java.net.UnknownHostException;
 +import java.util.List;
 +import java.util.Map;
 +
 +public interface GatewayConfig {
 +
 +  // Used as the basis for any home directory that is not specified.
 +  static final String GATEWAY_HOME_VAR = "GATEWAY_HOME";
 +
 +  // Variable name for the location of configuration files edited by users
 +  static final String GATEWAY_CONF_HOME_VAR = "GATEWAY_CONF_HOME";
 +
 +  // Variable name for the location of data files generated by the gateway at runtime.
 +  static final String GATEWAY_DATA_HOME_VAR = "GATEWAY_DATA_HOME";
 +
 +  public static final String GATEWAY_CONFIG_ATTRIBUTE = "org.apache.knox.gateway.config";
 +  public static final String HADOOP_KERBEROS_SECURED = "gateway.hadoop.kerberos.secured";
 +  public static final String KRB5_CONFIG = "java.security.krb5.conf";
 +  public static final String KRB5_DEBUG = "sun.security.krb5.debug";
 +  public static final String KRB5_LOGIN_CONFIG = "java.security.auth.login.config";
 +  public static final String KRB5_USE_SUBJECT_CREDS_ONLY = "javax.security.auth.useSubjectCredsOnly";
 +  public static final String SIGNING_KEYSTORE_NAME = "gateway.signing.keystore.name";
 +  public static final String SIGNING_KEY_ALIAS = "gateway.signing.key.alias";
 +
 +  /**
 +   * The location of the gateway configuration.
 +   * Subdirectories will be: topologies
 +   * @return The location of the gateway configuration.
 +   */
 +  String getGatewayConfDir();
 +
 +  /**
 +   * The location of the gateway runtime generated data.
 +   * Subdirectories will be security, deployments
 +   * @return The location of the gateway runtime generated data.
 +   */
 +  String getGatewayDataDir();
 +
 +  /**
 +   * The location of the gateway services definitions' root directory
 +   * @return The location of the gateway services top level directory.
 +   */
 +  String getGatewayServicesDir();
 +
 +  /**
 +   * The location of the gateway applications' root directory
 +   * @return The location of the gateway applications top level directory.
 +   */
 +  String getGatewayApplicationsDir();
 +
 +  String getHadoopConfDir();
 +
 +  String getGatewayHost();
 +
 +  int getGatewayPort();
 +
 +  String getGatewayPath();
 +
 +  String getGatewayTopologyDir();
 +
 +  String getGatewaySecurityDir();
 +
 +  String getGatewayDeploymentDir();
 +
 +  InetSocketAddress getGatewayAddress() throws UnknownHostException;
 +
 +  boolean isSSLEnabled();
 +  
 +  List<String> getExcludedSSLProtocols();
 +
 +  List<String> getIncludedSSLCiphers();
 +
 +  List<String> getExcludedSSLCiphers();
 +
 +  boolean isHadoopKerberosSecured();
 +
 +  String getKerberosConfig();
 +
 +  boolean isKerberosDebugEnabled();
 +
 +  String getKerberosLoginConfig();
 +
 +  String getDefaultTopologyName();
 +
 +  String getDefaultAppRedirectPath();
 +
 +  String getFrontendUrl();
 +
 +  boolean isClientAuthNeeded();
 +
++  boolean isClientAuthWanted();
++
 +  String getTruststorePath();
 +
 +  boolean getTrustAllCerts();
 +
 +  String getKeystoreType();
 +
 +  String getTruststoreType();
 +
 +  boolean isXForwardedEnabled();
 +
 +  String getEphemeralDHKeySize();
 +
 +  int getHttpClientMaxConnections();
 +
 +  int getHttpClientConnectionTimeout();
 +
 +  int getHttpClientSocketTimeout();
 +
 +  int getThreadPoolMax();
 +
 +  int getHttpServerRequestBuffer();
 +
 +  int getHttpServerRequestHeaderBuffer();
 +
 +  int getHttpServerResponseBuffer();
 +
 +  int getHttpServerResponseHeaderBuffer();
 +
 +  int getGatewayDeploymentsBackupVersionLimit();
 +
 +  long getGatewayDeploymentsBackupAgeLimit();
 +
 +  long getGatewayIdleTimeout();
 +
 +  String getSigningKeystoreName();
 +
 +  String getSigningKeyAlias();
 +
 +  List<String> getGlobalRulesServices();
 +
 +  /**
 +   * Returns true if the websocket feature is enabled, else false.
 +   * Default is false.
 +   * @since 0.10
 +   * @return
 +   */
 +  boolean isWebsocketEnabled();
 +
 +  /**
 +   * Websocket connection max text message size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxTextMessageSize();
 +
 +  /**
 +   * Websocket connection max binary message size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxBinaryMessageSize();
 +
 +  /**
 +   * Websocket connection max text message buffer size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxTextMessageBufferSize();
 +
 +  /**
 +   * Websocket connection max binary message buffer size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketMaxBinaryMessageBufferSize();
 +
 +  /**
 +   * Websocket connection input buffer size.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketInputBufferSize();
 +
 +  /**
 +   * Websocket connection async write timeout.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketAsyncWriteTimeout();
 +
 +  /**
 +   * Websocket connection idle timeout.
 +   * @since 0.10
 +   * @return
 +   */
 +  int getWebsocketIdleTimeout();
 +
 +  boolean isMetricsEnabled();
 +
 +  boolean isJmxMetricsReportingEnabled();
 +
 +  boolean isGraphiteMetricsReportingEnabled();
 +
 +  String getGraphiteHost();
 +
 +  int getGraphitePort();
 +
 +  int getGraphiteReportingFrequency();
 +
 +  /**
 +   * List of MIME types to be compressed.
 +   * @since 0.12
 +   */
 +  List<String> getMimeTypesToCompress();
 +
 +  /**
 +   * Enable cookie scoping to gateway path
 +   *
 +   * @since 0.13
 +   */
 +  boolean isCookieScopingToPathEnabled();
 +
 +  /**
 +   * Configured name of the HTTP Header that is expected
 +   * to be set by a proxy in front of the gateway.
 +   * @return
 +   */
 +  String getHeaderNameForRemoteAddress();
 +
 +  /**
 +   * Configured Algorithm name to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getAlgorithm();
 +
 +  /**
 +   * Configured Algorithm name to be used by the CryptoService
 +   * for password based encryption
 +   * @return
 +   */
 +  String getPBEAlgorithm();
 +
 +  /**
 +   * Configured Transformation name to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getTransformation();
 +
 +  /**
 +   * Configured SaltSize to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getSaltSize();
 +
 +  /**
 +   * Configured IterationCount to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getIterationCount();
 +
 +  /**
 +   * Configured KeyLength to be used by the CryptoService
 +   * and MasterService implementations
 +   * @return
 +   */
 +  String getKeyLength();
 +
 +  /**
 +   * Map of Topology names and their ports.
 +   *
 +   * @return
 +   */
 +  Map<String, Integer> getGatewayPortMappings();
 +
 +  /**
 +   * Is the Port Mapping feature enabled?
 +   * @return
 +   */
 +  boolean isGatewayPortMappingEnabled();
 +
 +  /**
 +   * Is the Server header suppressed?
 +   * @return
 +   */
 +  boolean isGatewayServerHeaderEnabled();
 +}
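
Tests that depend on the new isClientAuthWanted() flag can stub it the same
way the topology service test earlier stubs getGatewayTopologyDir(). A
minimal sketch using EasyMock nice mocks (the test name and flag values are
illustrative only):

    import static org.junit.Assert.assertTrue;

    import org.apache.knox.gateway.config.GatewayConfig;
    import org.easymock.EasyMock;
    import org.junit.Test;

    public class ClientAuthConfigStubTest {
      @Test
      public void testClientAuthWantedStub() {
        GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
        EasyMock.expect(config.isClientAuthNeeded()).andReturn(false).anyTimes();
        EasyMock.expect(config.isClientAuthWanted()).andReturn(true).anyTimes();
        EasyMock.replay(config);
        assertTrue(config.isClientAuthWanted());  // the stubbed value is what callers will see
      }
    }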

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/JWTokenAuthority.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/JWTokenAuthority.java
index 17c9016,0000000..7bdcb69
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/JWTokenAuthority.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/JWTokenAuthority.java
@@@ -1,52 -1,0 +1,51 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.security.token;
 +
 +import java.security.Principal;
 +import java.security.interfaces.RSAPublicKey;
 +import java.util.List;
 +
 +import javax.security.auth.Subject;
 +
 +import org.apache.knox.gateway.services.security.token.impl.JWT;
- import org.apache.knox.gateway.services.security.token.impl.JWTToken;
 +
 +public interface JWTokenAuthority {
 +
-   JWTToken issueToken(Subject subject, String algorithm)
++  JWT issueToken(Subject subject, String algorithm)
 +      throws TokenServiceException;
 +
-   JWTToken issueToken(Principal p, String algorithm)
++  JWT issueToken(Principal p, String algorithm)
 +      throws TokenServiceException;
 +
-   JWTToken issueToken(Principal p, String audience,
++  JWT issueToken(Principal p, String audience,
 +      String algorithm) throws TokenServiceException;
 +
-   boolean verifyToken(JWTToken token) throws TokenServiceException;
++  boolean verifyToken(JWT token) throws TokenServiceException;
 +
-   boolean verifyToken(JWTToken token, RSAPublicKey publicKey)
++  boolean verifyToken(JWT token, RSAPublicKey publicKey)
 +      throws TokenServiceException;
 +
-   JWTToken issueToken(Principal p, String audience, String algorithm,
-       long expires) throws TokenServiceException;
++  JWT issueToken(Principal p, String algorithm, long expires) throws TokenServiceException;
 +
-   JWT issueToken(Principal p, String audience, long l) throws TokenServiceException;
++  JWT issueToken(Principal p, String audience, String algorithm,
++      long expires) throws TokenServiceException;
 +
-   JWTToken issueToken(Principal p, List<String> audience, String algorithm,
++  JWT issueToken(Principal p, List<String> audience, String algorithm,
 +      long expires) throws TokenServiceException;
 +}
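
With the signatures above expressed in terms of JWT rather than JWTToken,
callers can hold the SPI type end to end. A hedged sketch of caller-side
usage (the class and method names are hypothetical; "RS256" mirrors the
algorithm used by the tests in this series):

    import java.security.Principal;

    import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
    import org.apache.knox.gateway.services.security.token.TokenServiceException;
    import org.apache.knox.gateway.services.security.token.impl.JWT;

    final class TokenUsageSketch {
      // Issue a token for the principal and return its subject if it verifies.
      static String subjectOf(JWTokenAuthority authority, Principal principal)
          throws TokenServiceException {
        JWT token = authority.issueToken(principal, "RS256");  // interface type, not JWTToken
        return authority.verifyToken(token) ? token.getSubject() : null;
      }
    }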

http://git-wip-us.apache.org/repos/asf/knox/blob/b3107e91/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWT.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWT.java
index e906c11,0000000..8638da5
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWT.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/security/token/impl/JWT.java
@@@ -1,60 -1,0 +1,63 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.security.token.impl;
 +
 +import java.util.Date;
 +
 +import com.nimbusds.jose.JWSSigner;
++import com.nimbusds.jose.JWSVerifier;
 +
 +public interface JWT {
 +
-   public static final String PRINCIPAL = "prn";
-   public static final String SUBJECT = "sub";
-   public static final String ISSUER = "iss";
-   public static final String AUDIENCE = "aud";
-   public static final String EXPIRES = "exp";
++  String PRINCIPAL = "prn";
++  String SUBJECT = "sub";
++  String ISSUER = "iss";
++  String AUDIENCE = "aud";
++  String EXPIRES = "exp";
 +
-   public abstract String getPayload();
++  String getPayload();
 +
-   public abstract void setSignaturePayload(byte[] payload);
++  void setSignaturePayload(byte[] payload);
 +
-   public abstract byte[] getSignaturePayload();
++  byte[] getSignaturePayload();
 +
-   public abstract String getClaim(String claimName);
++  String getClaim(String claimName);
 +
-   public abstract String getPrincipal();
++  String getPrincipal();
 +
-   public abstract String getIssuer();
++  String getIssuer();
 +
-   public abstract String getAudience();
++  String getAudience();
 +
 +  String[] getAudienceClaims();
 +
-   public abstract String getExpires();
++  String getExpires();
 +
-   public abstract Date getExpiresDate();
++  Date getExpiresDate();
 +
-   public abstract String getSubject();
++  String getSubject();
 +
-   public abstract String getHeader();
++  String getHeader();
 +
-   public abstract String getClaims();
++  String getClaims();
 +
-   public abstract void sign(JWSSigner signer);
++  void sign(JWSSigner signer);
++
++  boolean verify(JWSVerifier verifier);
 +
 +}
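
An implementation of the new verify(JWSVerifier) method would typically
delegate to Nimbus, much as sign(JWSSigner) does; a sketch assuming the
implementing class wraps a com.nimbusds.jwt.SignedJWT field named jwt
(JOSEException comes from com.nimbusds.jose):

    @Override
    public boolean verify(JWSVerifier verifier) {
      try {
        return jwt.verify(verifier);  // Nimbus signature verification
      } catch (JOSEException e) {
        return false;                 // treat verification errors as an invalid token
      }
    }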


[09/22] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..1e5e7b2
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}
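
Note on the test approach: the canned-response subclass above keeps the discovery tests hermetic by intercepting every REST call and answering from a map of pre-parsed JSON templates keyed by API path. A minimal, self-contained sketch of the same pattern (class and method names here are illustrative stand-ins, not part of the Knox API):

    import net.minidev.json.JSONObject;
    import net.minidev.json.JSONValue;
    import java.util.HashMap;
    import java.util.Map;

    public class CannedResponseExample {

        // Stand-in for a discovery base class that performs real HTTP calls.
        static class RestDiscovery {
            protected JSONObject invokeREST(String url, String user, String pwdAlias) {
                throw new UnsupportedOperationException("real HTTP call");
            }
        }

        // Test double: serves canned JSON instead of touching the network.
        static class CannedDiscovery extends RestDiscovery {
            private final Map<String, JSONObject> responses = new HashMap<>();

            void register(String path, String json) {
                responses.put(path, (JSONObject) JSONValue.parse(json));
            }

            @Override
            protected JSONObject invokeREST(String url, String user, String pwdAlias) {
                // Key on the API path only, ignoring scheme and host, as the test above does.
                return responses.get(url.substring(url.indexOf("/api")));
            }
        }

        public static void main(String[] args) {
            CannedDiscovery disco = new CannedDiscovery();
            disco.register("/api/v1/clusters", "{\"items\":[]}");
            System.out.println(disco.invokeREST("http://host:8080/api/v1/clusters", "u", "p"));
        }
    }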

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/descriptors/README b/b/gateway-release/home/conf/descriptors/README
new file mode 100644
index 0000000..a2e5226
--- /dev/null
+++ b/b/gateway-release/home/conf/descriptors/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/shared-providers/README b/b/gateway-release/home/conf/shared-providers/README
new file mode 100644
index 0000000..44d12a3
--- /dev/null
+++ b/b/gateway-release/home/conf/shared-providers/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/pom.xml b/gateway-discovery-ambari/pom.xml
new file mode 100644
index 0000000..924e89c
--- /dev/null
+++ b/gateway-discovery-ambari/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.14.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-discovery-ambari</artifactId>
+
+    <name>gateway-discovery-ambari</name>
+    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..6eaabd3
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.containsKey(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}
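
Usage note: AmbariCluster is a two-level lookup structure; service configurations are stored per service name and then per configuration type, while components are keyed by component name. A minimal sketch (same package assumed, since the types are package-private; all values invented for illustration):

    import java.util.Collections;
    import java.util.Map;

    public class AmbariClusterExample {
        public static void main(String[] args) {
            AmbariCluster cluster = new AmbariCluster("Sandbox");

            // Register one configuration type ("hdfs-site") for the HDFS service.
            Map<String, String> props = Collections.singletonMap(
                    "dfs.namenode.http-address", "c6401.ambari.apache.org:50070");
            cluster.addServiceConfiguration("HDFS", "hdfs-site",
                    new AmbariCluster.ServiceConfiguration("hdfs-site", "1", props));

            // Retrieval mirrors the storage: service name first, then configuration type.
            AmbariCluster.ServiceConfiguration sc =
                    cluster.getServiceConfiguration("HDFS", "hdfs-site");
            System.out.println(sc.getProperties().get("dfs.namenode.http-address"));
        }
    }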

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..55257fb
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}
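
Usage note: AmbariComponent is a plain value holder for what discovery learns about a single component, i.e. the hosts it runs on and the configuration properties that apply to it. A short sketch of the read path (same package assumed; host and property values invented for illustration):

    import java.util.Arrays;
    import java.util.Collections;

    public class AmbariComponentExample {
        public static void main(String[] args) {
            AmbariComponent namenode = new AmbariComponent(
                    "NAMENODE",                               // component name
                    "2.7.3",                                  // component version (illustrative)
                    "Sandbox",                                // cluster name
                    "HDFS",                                   // owning service
                    Arrays.asList("c6401.ambari.apache.org"), // host name(s)
                    Collections.singletonMap("dfs.namenode.http-address",
                                             "c6401.ambari.apache.org:50070"));

            // Host list and a single configuration property lookup.
            System.out.println(namenode.getHostNames());
            System.out.println(namenode.getConfigProperty("dfs.namenode.http-address"));
        }
    }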

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..34f20a7
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which the endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // invokeREST may return null if the request fails, so guard before parsing
+        if (json != null) {
+            // Parse the cluster names from the response, and perform the cluster discovery
+            JSONArray clusterItems = (JSONArray) json.get("items");
+            for (Object clusterItem : clusterItems) {
+                String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+                try {
+                    Cluster c = discover(config, clusterName);
+                    clusters.put(clusterName, c);
+                } catch (Exception e) {
+                    log.clusterDiscoveryError(clusterName, e);
+                }
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+                        // Prefer public_host_name over host_name, assuming the public name
+                        // is the one most likely to be reachable from the gateway
+                        String hostName = (String) hostRoles.get("public_host_name");
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(configProperties.get(propertyName)));
+                    }
+                    // Create the configuration model once, and register it both in the
+                    // local lookup map and on the cluster model
+                    AmbariCluster.ServiceConfiguration svcCfg =
+                            new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps);
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, svcCfg);
+                    cluster.addServiceConfiguration(serviceName, configType, svcCfg);
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null && configs != null) {
+                // Guard against a missing configuration for the mapped type to avoid an NPE
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                if (svcConfig != null) {
+                    AmbariComponent c = new AmbariComponent(componentName,
+                                                            svcConfig.getVersion(),
+                                                            clusterName,
+                                                            serviceName,
+                                                            hostNames,
+                                                            svcConfig.getProperties());
+                    cluster.addComponent(c);
+                }
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no username is configured, fall back to the default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add a basic auth header using the resolved credentials
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse(EntityUtils.toString(entity));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if (response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
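
For reference, a minimal sketch of how this discovery implementation might be
driven. The anonymous ServiceDiscoveryConfig below is illustrative only,
assuming it is an interface exposing the three accessors used above; inside the
gateway, the AliasService is injected via @GatewayService, so a standalone run
like this would fail password resolution unless an alias service is also wired
in:

    import java.util.Map;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
    import org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType;

    public class AmbariDiscoveryDriver {
        public static void main(String[] args) {
            // AmbariServiceDiscovery is package-private; instantiate via its public type
            ServiceDiscovery discovery = new AmbariServiceDiscoveryType().newInstance();

            // Hypothetical config values; the address must point at a live Ambari instance
            ServiceDiscoveryConfig config = new ServiceDiscoveryConfig() {
                public String getAddress()       { return "http://ambari.example.com:8080"; }
                public String getUser()          { return "admin"; }
                public String getPasswordAlias() { return "ambari.discovery.password"; }
            };

            // Discover every cluster the Ambari instance knows about
            Map<String, ServiceDiscovery.Cluster> clusters = discovery.discover(config);
            for (String clusterName : clusters.keySet()) {
                System.out.println("Discovered cluster: " + clusterName);
            }
        }
    }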

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..caa16ed
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+}
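
The @Messages/@Message annotations drive Knox's generated i18n loggers:
MessagesFactory builds an implementation of the annotated interface at runtime,
and the {0}/{1} placeholders in each @Message text are filled from the method
arguments. The calling pattern, mirroring what AmbariServiceDiscovery already
does above (the host name is illustrative):

    // Obtain the generated logger for the annotated interface and emit a
    // parameterized INFO message ("Discovered: Service: HDFS, Host: ...")
    AmbariServiceDiscoveryMessages log =
            MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
    log.discoveredServiceHost("HDFS", "host1.example.com");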

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..723a786
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..0674642
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return One or more endpoint URLs for the specified service.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            String scheme = null; // TODO: What is the scheme for the binary transport mode?
+            if ("http".equals(transport)) {
+                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
+            }
+
+            // Only emit a URL when the scheme could be determined, to avoid "null://..." results
+            if (scheme != null) {
+                result.add(scheme + "://" + host + ":" + port + "/" + path);
+            }
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
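
Given an AmbariCluster populated by the discovery above, URL derivation is a
single call. A short sketch (AmbariServiceURLCreator is package-private, so
this would live in the same package; the endpoint in the comment assumes an
illustrative dfs.namenode.http-address value in hdfs-site):

    // 'cluster' is an AmbariCluster returned by AmbariServiceDiscovery.discover()
    AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
    List<String> webhdfsURLs = urlCreator.create(cluster, "WEBHDFS");
    // e.g. ["http://nn.example.com:50070/webhdfs"] when hdfs-site contains
    // dfs.namenode.http-address=nn.example.com:50070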

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..1da4fc9
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
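
This registration is what lets java.util.ServiceLoader locate the Ambari
implementation on the classpath. A sketch of resolving a discovery type by name
(the lookup helper itself is hypothetical; the gateway's own factory code may
differ):

    import java.util.ServiceLoader;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    public class DiscoveryTypeLookup {
        // Return a new ServiceDiscovery for the first registered type whose
        // name matches, or null if none is registered
        static ServiceDiscovery forType(String type) {
            for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
                if (sdt.getType().equalsIgnoreCase(type)) {
                    return sdt.newInstance();
                }
            }
            return null;
        }
    }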